Dec 13 06:45:36 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 13 06:45:37 crc restorecon[4627]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 13 06:45:37 crc restorecon[4627]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 06:45:37 crc 
restorecon[4627]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 06:45:37 crc 
restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc 
restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc 
restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 06:45:37 
crc restorecon[4627]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 
06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 
06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 06:45:37 crc restorecon[4627]: 
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 13 06:45:37 crc restorecon[4627]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 13 06:45:37 crc restorecon[4627]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Dec 13 06:45:38 crc kubenswrapper[4644]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 13 06:45:38 crc kubenswrapper[4644]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Dec 13 06:45:38 crc kubenswrapper[4644]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 13 06:45:38 crc kubenswrapper[4644]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 13 06:45:38 crc kubenswrapper[4644]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 13 06:45:38 crc kubenswrapper[4644]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.249861 4644 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253454 4644 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253509 4644 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253518 4644 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253523 4644 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253527 4644 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253622 4644 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253627 4644 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253632 4644 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253637 4644 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253642 4644 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253645 4644 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253649 4644 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253658 4644 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253662 4644 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253669 4644 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253673 4644 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253677 4644 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253680 4644 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253684 4644 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253687 4644 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
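[Editor's sketch: the six "Flag ... has been deprecated" warnings above all point at the same migration, moving kubelet flags into the KubeletConfiguration file passed via --config. A minimal sketch of the equivalent config-file fields follows, using kubelet.config.k8s.io/v1beta1 field names; the socket path, taint, and resource values are illustrative placeholders, not values read from this node.]

  apiVersion: kubelet.config.k8s.io/v1beta1
  kind: KubeletConfiguration
  # --container-runtime-endpoint -> containerRuntimeEndpoint (illustrative CRI-O socket)
  containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
  # --volume-plugin-dir -> volumePluginDir
  volumePluginDir: /etc/kubernetes/kubelet-plugins/volume/exec
  # --register-with-taints -> registerWithTaints (illustrative taint)
  registerWithTaints:
  - key: node-role.kubernetes.io/master
    effect: NoSchedule
  # --system-reserved -> systemReserved (illustrative reservations)
  systemReserved:
    cpu: 500m
    memory: 1Gi
  # --minimum-container-ttl-duration has no field of its own; the warning
  # above points at the eviction settings instead (illustrative threshold)
  evictionHard:
    memory.available: 100Mi
  # gates the kubelet recognizes can be set here; the OpenShift-specific
  # names warned about above are unknown to the upstream kubelet and ignored
  featureGates:
    CloudDualStackNodeIPs: true

[--pod-infra-container-image has no config-file equivalent: per the warning above, the sandbox image is owned by the CRI runtime, which for CRI-O would be its pause_image setting. The long run of "unrecognized feature gate" warnings that continues below covers OpenShift-only gates; the kubelet logs and ignores them.]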
kubenswrapper[4644]: W1213 06:45:38.253693 4644 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253697 4644 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253700 4644 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253706 4644 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253710 4644 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253714 4644 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253721 4644 feature_gate.go:330] unrecognized feature gate: Example Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253725 4644 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253730 4644 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253735 4644 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253740 4644 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253743 4644 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253746 4644 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253750 4644 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253754 4644 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253757 4644 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253760 4644 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253764 4644 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253770 4644 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253773 4644 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253776 4644 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253780 4644 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253783 4644 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253786 4644 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253789 4644 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253793 4644 feature_gate.go:330] unrecognized feature gate: 
ConsolePluginContentSecurityPolicy Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253796 4644 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253799 4644 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253804 4644 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253812 4644 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253819 4644 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253823 4644 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253827 4644 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253831 4644 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253835 4644 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253839 4644 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253843 4644 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253847 4644 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253851 4644 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253855 4644 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253859 4644 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253865 4644 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253868 4644 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253871 4644 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253875 4644 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253878 4644 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253881 4644 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253884 4644 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253887 4644 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253891 4644 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.253894 4644 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 13 
06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254021 4644 flags.go:64] FLAG: --address="0.0.0.0" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254030 4644 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254039 4644 flags.go:64] FLAG: --anonymous-auth="true" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254044 4644 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254050 4644 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254053 4644 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254058 4644 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254067 4644 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254072 4644 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254076 4644 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254080 4644 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254084 4644 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254088 4644 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254094 4644 flags.go:64] FLAG: --cgroup-root="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254099 4644 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254106 4644 flags.go:64] FLAG: --client-ca-file="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254110 4644 flags.go:64] FLAG: --cloud-config="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254115 4644 flags.go:64] FLAG: --cloud-provider="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254119 4644 flags.go:64] FLAG: --cluster-dns="[]" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254124 4644 flags.go:64] FLAG: --cluster-domain="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254128 4644 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254132 4644 flags.go:64] FLAG: --config-dir="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254137 4644 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254142 4644 flags.go:64] FLAG: --container-log-max-files="5" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254150 4644 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254155 4644 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254160 4644 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254164 4644 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254168 4644 flags.go:64] FLAG: --contention-profiling="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 
06:45:38.254172 4644 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254176 4644 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254180 4644 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254187 4644 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254207 4644 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254211 4644 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254215 4644 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254220 4644 flags.go:64] FLAG: --enable-load-reader="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254224 4644 flags.go:64] FLAG: --enable-server="true" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254228 4644 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254238 4644 flags.go:64] FLAG: --event-burst="100" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254243 4644 flags.go:64] FLAG: --event-qps="50" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254250 4644 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254255 4644 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254258 4644 flags.go:64] FLAG: --eviction-hard="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254263 4644 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254289 4644 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254295 4644 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254299 4644 flags.go:64] FLAG: --eviction-soft="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254303 4644 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254310 4644 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254316 4644 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254321 4644 flags.go:64] FLAG: --experimental-mounter-path="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254325 4644 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254329 4644 flags.go:64] FLAG: --fail-swap-on="true" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254333 4644 flags.go:64] FLAG: --feature-gates="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254338 4644 flags.go:64] FLAG: --file-check-frequency="20s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254342 4644 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254349 4644 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254515 4644 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 
06:45:38.254532 4644 flags.go:64] FLAG: --healthz-port="10248" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254537 4644 flags.go:64] FLAG: --help="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254685 4644 flags.go:64] FLAG: --hostname-override="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254697 4644 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254702 4644 flags.go:64] FLAG: --http-check-frequency="20s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254707 4644 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254711 4644 flags.go:64] FLAG: --image-credential-provider-config="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254714 4644 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254718 4644 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254722 4644 flags.go:64] FLAG: --image-service-endpoint="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254726 4644 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254730 4644 flags.go:64] FLAG: --kube-api-burst="100" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254734 4644 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254738 4644 flags.go:64] FLAG: --kube-api-qps="50" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254741 4644 flags.go:64] FLAG: --kube-reserved="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254746 4644 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254749 4644 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254753 4644 flags.go:64] FLAG: --kubelet-cgroups="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254757 4644 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254760 4644 flags.go:64] FLAG: --lock-file="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254764 4644 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254768 4644 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254772 4644 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254778 4644 flags.go:64] FLAG: --log-json-split-stream="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254782 4644 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254785 4644 flags.go:64] FLAG: --log-text-split-stream="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254789 4644 flags.go:64] FLAG: --logging-format="text" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254792 4644 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254796 4644 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254801 4644 flags.go:64] FLAG: --manifest-url="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254805 4644 
flags.go:64] FLAG: --manifest-url-header="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254811 4644 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254815 4644 flags.go:64] FLAG: --max-open-files="1000000" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254821 4644 flags.go:64] FLAG: --max-pods="110" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254825 4644 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254829 4644 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254833 4644 flags.go:64] FLAG: --memory-manager-policy="None" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254837 4644 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254841 4644 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254845 4644 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254848 4644 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254860 4644 flags.go:64] FLAG: --node-status-max-images="50" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254864 4644 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254868 4644 flags.go:64] FLAG: --oom-score-adj="-999" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254871 4644 flags.go:64] FLAG: --pod-cidr="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254875 4644 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254881 4644 flags.go:64] FLAG: --pod-manifest-path="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254885 4644 flags.go:64] FLAG: --pod-max-pids="-1" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254889 4644 flags.go:64] FLAG: --pods-per-core="0" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254892 4644 flags.go:64] FLAG: --port="10250" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254897 4644 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254901 4644 flags.go:64] FLAG: --provider-id="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254904 4644 flags.go:64] FLAG: --qos-reserved="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254908 4644 flags.go:64] FLAG: --read-only-port="10255" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254912 4644 flags.go:64] FLAG: --register-node="true" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254916 4644 flags.go:64] FLAG: --register-schedulable="true" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254919 4644 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254927 4644 flags.go:64] FLAG: --registry-burst="10" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254931 4644 flags.go:64] FLAG: --registry-qps="5" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254934 4644 flags.go:64] 
FLAG: --reserved-cpus="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254938 4644 flags.go:64] FLAG: --reserved-memory="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254943 4644 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254947 4644 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254952 4644 flags.go:64] FLAG: --rotate-certificates="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254956 4644 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254960 4644 flags.go:64] FLAG: --runonce="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254965 4644 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254969 4644 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254972 4644 flags.go:64] FLAG: --seccomp-default="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254976 4644 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254980 4644 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254984 4644 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254988 4644 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254992 4644 flags.go:64] FLAG: --storage-driver-password="root" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254996 4644 flags.go:64] FLAG: --storage-driver-secure="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254999 4644 flags.go:64] FLAG: --storage-driver-table="stats" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255003 4644 flags.go:64] FLAG: --storage-driver-user="root" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255007 4644 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255011 4644 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255015 4644 flags.go:64] FLAG: --system-cgroups="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255019 4644 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255026 4644 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255030 4644 flags.go:64] FLAG: --tls-cert-file="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255033 4644 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255039 4644 flags.go:64] FLAG: --tls-min-version="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255042 4644 flags.go:64] FLAG: --tls-private-key-file="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255046 4644 flags.go:64] FLAG: --topology-manager-policy="none" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255050 4644 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255054 4644 flags.go:64] FLAG: --topology-manager-scope="container" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255058 4644 flags.go:64] 
FLAG: --v="2" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255063 4644 flags.go:64] FLAG: --version="false" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255070 4644 flags.go:64] FLAG: --vmodule="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255082 4644 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255087 4644 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255175 4644 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255181 4644 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255187 4644 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255209 4644 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255213 4644 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255218 4644 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255224 4644 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255228 4644 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255232 4644 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255236 4644 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255239 4644 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255242 4644 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255247 4644 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
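[editor's note] The flags.go:64 records above dump every effective flag as FLAG: --name="value". A small sketch for pulling them back into a dictionary when reading this log, assuming one journal record per line (the archived copy here wraps many records onto one line):

# Sketch: recover the kubelet's effective flags from "FLAG: --name=\"value\""
# records. Values are kept as the raw quoted strings the kubelet printed.
import re

FLAG_RE = re.compile(r'flags\.go:\d+\] FLAG: --([A-Za-z0-9-]+)="(.*)"$')

def parse_flags(lines):
    """Return {flag_name: raw_string_value} for every FLAG record seen."""
    flags = {}
    for line in lines:
        m = FLAG_RE.search(line)
        if m:
            flags[m.group(1)] = m.group(2)
    return flags

sample = [
    'Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.254128 4644 '
    'flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"',
]
print(parse_flags(sample))  # {'config': '/etc/kubernetes/kubelet.conf'}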
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255251 4644 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255255 4644 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255258 4644 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255262 4644 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255265 4644 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255269 4644 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255272 4644 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255276 4644 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255279 4644 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255283 4644 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255287 4644 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255290 4644 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255294 4644 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255297 4644 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255300 4644 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255303 4644 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255307 4644 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255310 4644 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255313 4644 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255316 4644 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255320 4644 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255323 4644 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255330 4644 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255333 4644 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255336 4644 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 
06:45:38.255340 4644 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255344 4644 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255348 4644 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255352 4644 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255356 4644 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255359 4644 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255363 4644 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255366 4644 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255370 4644 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255373 4644 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255376 4644 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255380 4644 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255383 4644 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255387 4644 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255390 4644 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255393 4644 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255397 4644 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255401 4644 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255405 4644 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255408 4644 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255412 4644 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255415 4644 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255418 4644 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255422 4644 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255425 4644 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255428 4644 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255431 4644 feature_gate.go:330] unrecognized feature gate: Example Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255435 4644 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255457 4644 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255466 4644 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255469 4644 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255472 4644 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.255475 4644 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.255481 4644 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.262894 4644 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.262948 4644 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263058 4644 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263074 4644 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263079 4644 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263083 4644 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263088 4644 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 
06:45:38.263092 4644 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263095 4644 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263099 4644 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263103 4644 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263106 4644 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263109 4644 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263113 4644 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263118 4644 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263125 4644 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263128 4644 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263132 4644 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263135 4644 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263138 4644 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263141 4644 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263144 4644 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263148 4644 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263152 4644 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263156 4644 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263162 4644 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263166 4644 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263171 4644 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263176 4644 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263180 4644 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263183 4644 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263187 4644 feature_gate.go:330] unrecognized feature gate: Example Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263190 4644 feature_gate.go:330] unrecognized feature gate: 
UpgradeStatus Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263210 4644 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263214 4644 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263217 4644 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263222 4644 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263225 4644 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263230 4644 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263236 4644 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263240 4644 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263243 4644 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263247 4644 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263250 4644 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263254 4644 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263257 4644 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263262 4644 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263265 4644 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263270 4644 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263274 4644 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263277 4644 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263281 4644 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263284 4644 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263287 4644 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263290 4644 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263293 4644 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263297 4644 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263300 4644 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263305 4644 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263309 4644 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263312 4644 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263315 4644 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263318 4644 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263321 4644 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263325 4644 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263328 4644 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263331 4644 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263334 4644 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263337 4644 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263342 4644 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263346 4644 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263350 4644 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263354 4644 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.263360 4644 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263514 4644 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263522 4644 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263527 4644 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263531 4644 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263535 4644 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263539 4644 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263542 4644 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263546 4644 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263549 4644 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263554 4644 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263558 4644 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263562 4644 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263565 4644 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263568 4644 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263572 4644 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263577 4644 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263581 4644 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263586 4644 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 13 06:45:38 crc 
kubenswrapper[4644]: W1213 06:45:38.263590 4644 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263594 4644 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263598 4644 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263601 4644 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263604 4644 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263608 4644 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263611 4644 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263614 4644 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263617 4644 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263621 4644 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263624 4644 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263627 4644 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263630 4644 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263634 4644 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263637 4644 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263640 4644 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263645 4644 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263648 4644 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263653 4644 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
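[editor's note] The "unrecognized feature gate" block repeats because the kubelet parses the configured gate list several times during startup: the 06:45:38.253*, .255*, and .263* runs above warn about the same set each time, which appear to be OpenShift-side gate names that the vanilla kubelet gate registry does not know. A sketch that collapses the repeats into per-gate counts, under the same one-record-per-line assumption as the flag parser earlier:

# Sketch: count how often each unrecognized gate name is warned about,
# so repeated parse passes collapse into one sorted summary.
import re
from collections import Counter

GATE_RE = re.compile(r'unrecognized feature gate: (\S+)')

def unknown_gates(lines):
    """Return Counter mapping gate name -> number of warnings seen."""
    counts = Counter()
    for line in lines:
        m = GATE_RE.search(line)
        if m:
            counts[m.group(1)] += 1
    return counts

# e.g. in this boot, counts['GatewayAPI'] would be 4: one per parse pass.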
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263658 4644 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263662 4644 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263665 4644 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263669 4644 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263672 4644 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263676 4644 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263679 4644 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263683 4644 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263687 4644 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263691 4644 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263695 4644 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263699 4644 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263703 4644 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263706 4644 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263710 4644 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263713 4644 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263716 4644 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263720 4644 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263723 4644 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263726 4644 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263730 4644 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263733 4644 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263736 4644 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263739 4644 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263743 4644 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 
06:45:38.263746 4644 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263749 4644 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263753 4644 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263756 4644 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263759 4644 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263763 4644 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263766 4644 feature_gate.go:330] unrecognized feature gate: Example Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263769 4644 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.263773 4644 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.263779 4644 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.263971 4644 server.go:940] "Client rotation is on, will bootstrap in background" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.267033 4644 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.267124 4644 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
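[editor's note] The feature_gate.go:386 summary above lists the gates the kubelet actually resolved, printed with Go's fmt rendering of a map[string]bool. A sketch for parsing that summary back into a dict, assuming gate names never contain spaces or colons of their own:

# Sketch: parse "feature gates: {map[Name:bool ...]}" into {name: bool}.
import re

def parse_gate_summary(record):
    m = re.search(r'feature gates: \{map\[(.*)\]\}', record)
    if not m:
        return {}
    pairs = (item.split(':', 1) for item in m.group(1).split())
    return {name: value == 'true' for name, value in pairs}

rec = 'feature gates: {map[CloudDualStackNodeIPs:true KMSv1:true NodeSwap:false]}'
print(parse_gate_summary(rec))
# {'CloudDualStackNodeIPs': True, 'KMSv1': True, 'NodeSwap': False}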
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.268048 4644 server.go:997] "Starting client certificate rotation"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.268076 4644 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.268575 4644 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-26 02:31:46.915192971 +0000 UTC
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.268655 4644 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 307h46m8.646541006s for next certificate rotation
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.284618 4644 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.285927 4644 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.295347 4644 log.go:25] "Validated CRI v1 runtime API"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.313768 4644 log.go:25] "Validated CRI v1 image API"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.315972 4644 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.321678 4644 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-13-06-42-01-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.321713 4644 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:49 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm:{mountpoint:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm major:0 minor:42 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:50 fsType:tmpfs blockSize:0} overlay_0-43:{mountpoint:/var/lib/containers/storage/overlay/94b752e0a51c0134b00ddef6dc7a933a9d7c1d9bdc88a18dae4192a0d557d623/merged major:0 minor:43 fsType:overlay blockSize:0}]
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.337075 4644 manager.go:217] Machine: {Timestamp:2025-12-13 06:45:38.334856616 +0000 UTC m=+0.549807469 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2445404 MemoryCapacity:33654120448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:b4ea0d06-22aa-4091-83fc-aeb05ba823a8 BootID:d67b613b-3746-4fbb-91d0-cb6f6c249fb5 Filesystems:[{Device:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm DeviceMajor:0 DeviceMinor:42 Capacity:65536000 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:49 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:overlay_0-43 DeviceMajor:0 DeviceMinor:43 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:50 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:d3:1a:5e Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:enp3s0 MacAddress:fa:16:3e:d3:1a:5e Speed:-1 Mtu:1500} {Name:enp7s0 MacAddress:fa:16:3e:38:a3:cb Speed:-1 Mtu:1440} {Name:enp7s0.20 MacAddress:52:54:00:2b:01:50 Speed:-1 Mtu:1436} {Name:enp7s0.21 MacAddress:52:54:00:a5:61:1e Speed:-1 Mtu:1436} {Name:enp7s0.22 MacAddress:52:54:00:78:f7:53 Speed:-1 Mtu:1436} {Name:enp7s0.23 MacAddress:52:54:00:28:7e:ff Speed:-1 Mtu:1436} {Name:eth10 MacAddress:de:93:6a:bc:96:79 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:56:94:3b:ed:c4:a7 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654120448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:65536 Type:Data Level:1} {Id:0 Size:65536 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:65536 Type:Data Level:1} {Id:1 Size:65536 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:65536 Type:Data Level:1} {Id:10 Size:65536 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:65536 Type:Data Level:1} {Id:11 Size:65536 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:65536 Type:Data Level:1} {Id:2 Size:65536 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:65536 Type:Data Level:1} {Id:3 Size:65536 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:65536 Type:Data Level:1} {Id:4 Size:65536 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:65536 Type:Data Level:1} {Id:5 Size:65536 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:65536 Type:Data Level:1} {Id:6 Size:65536 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:65536 Type:Data Level:1} {Id:7 Size:65536 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:65536 Type:Data Level:1} {Id:8 Size:65536 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:65536 Type:Data Level:1} {Id:9 Size:65536 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.337271 4644 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.337422 4644 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.337755 4644 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.337936 4644 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.337972 4644 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.338143 4644 topology_manager.go:138] "Creating topology manager with none policy"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.338153 4644 container_manager_linux.go:303] "Creating device plugin manager"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.338515 4644 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.338535 4644 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.339076 4644 state_mem.go:36] "Initialized new in-memory state store"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.339159 4644 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.340992 4644 kubelet.go:418] "Attempting to sync node with API server"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.341011 4644 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.341031 4644 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.341041 4644 kubelet.go:324] "Adding apiserver pod source"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.341052 4644 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.343190 4644 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.343784 4644 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.344904 4644 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused
Dec 13 06:45:38 crc kubenswrapper[4644]: E1213 06:45:38.344969 4644 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.25.89:6443: connect: connection refused" logger="UnhandledError"
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.344905 4644 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused
Dec 13 06:45:38 crc kubenswrapper[4644]: E1213 06:45:38.345007 4644 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.25.89:6443: connect: connection refused" logger="UnhandledError"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.345087 4644 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.346060 4644 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.346083 4644 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.346092 4644 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.346099 4644 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.346111 4644 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.346118 4644 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.346125 4644 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.346135 4644 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.346141 4644 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.346148 4644 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.346167 4644 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.346175 4644 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.347315 4644 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.347807 4644 server.go:1280] "Started kubelet"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.348167 4644 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.348366 4644 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.348378 4644 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 13 06:45:38 crc systemd[1]: Started Kubernetes Kubelet.
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.351431 4644 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.351489 4644 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.352438 4644 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.352501 4644 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 13 06:45:38 crc kubenswrapper[4644]: E1213 06:45:38.352675 4644 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 13 06:45:38 crc kubenswrapper[4644]: E1213 06:45:38.352801 4644 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="200ms"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.352929 4644 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.353088 4644 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 03:18:45.172256989 +0000 UTC
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.353144 4644 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 284h33m6.819116996s for next certificate rotation
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.353151 4644 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.353692 4644 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused
Dec 13 06:45:38 crc kubenswrapper[4644]: E1213 06:45:38.354031 4644 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.25.89:6443: connect: connection refused" logger="UnhandledError"
Dec 13 06:45:38 crc kubenswrapper[4644]: E1213 06:45:38.353408 4644 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 192.168.25.89:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1880b36ddbafe824 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-13 06:45:38.347780132 +0000 UTC m=+0.562730965,LastTimestamp:2025-12-13 06:45:38.347780132 +0000 UTC m=+0.562730965,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.354958 4644 factory.go:55] Registering systemd factory
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.356108 4644 factory.go:221] Registration of the systemd container factory successfully
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.357485 4644 server.go:460] "Adding debug handlers to kubelet server"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.358475 4644 factory.go:153] Registering CRI-O factory
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.358500 4644 factory.go:221] Registration of the crio container factory successfully
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.358559 4644 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.358582 4644 factory.go:103] Registering Raw factory
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.358600 4644 manager.go:1196] Started watching for new ooms in manager
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.359106 4644 manager.go:319] Starting recovery of all containers
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361693 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361751 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361763 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361774 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361784 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361792 4644 reconstruct.go:130]
"Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361803 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361812 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361823 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361831 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361841 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361850 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361859 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361871 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361880 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361888 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361896 4644 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361904 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361912 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361921 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361932 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361941 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361949 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361958 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361966 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361976 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361988 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.361996 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362024 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362033 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362041 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362049 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362059 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362067 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362077 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362085 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362094 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362103 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362113 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362126 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362135 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362145 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362155 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362165 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362174 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362182 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362190 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362212 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362221 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362228 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362237 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362248 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362260 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362270 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362278 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362289 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362298 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362309 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362317 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362327 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362335 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362344 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362354 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362362 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362370 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362380 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362389 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362402 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362410 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362419 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.362428 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.363892 4644 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" 
volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.363942 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.363960 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.363973 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.363984 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364009 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364021 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364031 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364041 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364052 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364060 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364069 4644 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364079 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364090 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364099 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364109 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364119 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364129 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364141 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364150 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364160 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364169 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364178 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364187 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364214 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364226 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364236 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364247 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364256 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364265 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364274 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364284 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364292 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364302 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364319 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364332 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364345 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364359 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364370 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364382 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364391 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364401 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364412 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364423 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364432 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364464 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364474 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364483 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364492 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364500 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364515 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364525 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364535 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364544 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364554 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364563 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364573 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364581 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364591 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364600 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364610 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364619 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364628 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364637 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364648 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364657 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364667 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364677 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364686 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364694 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364705 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364713 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364721 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364730 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364738 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364747 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364756 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364767 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364777 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364787 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364834 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364848 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364860 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364868 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364878 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364888 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364899 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364908 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364919 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364930 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364939 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364949 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364957 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364966 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364975 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364985 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.364993 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365001 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365012 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365022 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365033 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365044 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365055 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365065 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365075 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365085 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365094 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365103 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365111 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365120 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365129 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365138 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365147 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365157 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365166 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365174 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365182 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365191 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365221 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365232 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365241 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365250 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365258 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365267 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365276 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365285 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365296 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365305 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365313 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365322 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365331 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365339 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365348 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365358 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365366 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365377 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365385 4644 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365393 4644 reconstruct.go:97] "Volume reconstruction finished" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.365400 4644 reconciler.go:26] "Reconciler: start to sync state" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.373635 4644 manager.go:324] Recovery completed Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.383820 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.385027 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.385066 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.385076 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.385906 4644 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.385925 4644 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.385944 4644 state_mem.go:36] "Initialized new in-memory state store" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.386614 4644 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.387856 4644 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.387939 4644 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.388010 4644 kubelet.go:2335] "Starting kubelet main sync loop" Dec 13 06:45:38 crc kubenswrapper[4644]: E1213 06:45:38.388091 4644 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.392588 4644 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused Dec 13 06:45:38 crc kubenswrapper[4644]: E1213 06:45:38.392652 4644 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.25.89:6443: connect: connection refused" logger="UnhandledError" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.397612 4644 policy_none.go:49] "None policy: Start" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.398500 4644 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.398523 4644 state_mem.go:35] "Initializing new in-memory state store" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.445550 4644 manager.go:334] "Starting Device Plugin manager" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.445906 4644 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.445918 4644 server.go:79] "Starting device plugin registration server" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.446365 4644 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.446469 4644 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.446682 4644 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.446862 4644 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.446876 4644 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 13 06:45:38 crc kubenswrapper[4644]: E1213 06:45:38.452258 4644 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.488552 4644 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.488700 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 
06:45:38.489782 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.489825 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.489836 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.489978 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.490188 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.490254 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.490993 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.491021 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.491032 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.491020 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.491100 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.491115 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.491231 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.491432 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.491540 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.492081 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.492117 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.492132 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.492278 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.492374 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.492404 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.493247 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.493272 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.493281 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.493248 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.493460 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.493487 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.493567 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.493613 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.493634 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.493782 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.493849 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.493895 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.494571 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.494596 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.494607 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.494596 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.494650 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.494659 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.494821 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.494868 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.495476 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.495500 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.495508 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.547125 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.548725 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.548775 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.548787 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.548820 4644 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 13 06:45:38 crc kubenswrapper[4644]: E1213 06:45:38.549399 4644 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.89:6443: connect: connection refused" node="crc" Dec 13 06:45:38 crc kubenswrapper[4644]: E1213 06:45:38.554004 4644 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="400ms" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.567723 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.567772 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.567792 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.567809 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.567828 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.567902 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.567953 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.568169 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.568255 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.568287 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.568531 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.568567 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.568584 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: 
\"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.568600 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.568615 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.669733 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.669795 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.669820 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.669838 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.669855 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.669872 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.669886 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 
06:45:38.669899 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.669917 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.669933 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.669947 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.669944 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.669961 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.669974 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.669990 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.670019 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.670028 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.670053 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.670078 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.670101 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.670123 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.670143 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.670165 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.670187 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.670221 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.670253 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.670280 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.670302 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.670324 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.670345 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.750266 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.751612 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.751657 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.751666 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.751694 4644 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: E1213 06:45:38.752204 4644 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.89:6443: connect: connection refused" node="crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.817506 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.821676 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.836827 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.841662 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-18a6ab00a283cbe21735f729f94308440cbfc930cd046dcf5529fd195f4e6c08 WatchSource:0}: Error finding container 18a6ab00a283cbe21735f729f94308440cbfc930cd046dcf5529fd195f4e6c08: Status 404 returned error can't find the container with id 18a6ab00a283cbe21735f729f94308440cbfc930cd046dcf5529fd195f4e6c08
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.843697 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-d0770c50a76a17170a0a6164035311ded6c347a5717a485f6e357a9b269824e3 WatchSource:0}: Error finding container d0770c50a76a17170a0a6164035311ded6c347a5717a485f6e357a9b269824e3: Status 404 returned error can't find the container with id d0770c50a76a17170a0a6164035311ded6c347a5717a485f6e357a9b269824e3
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.852908 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-0698c22380c781c0a30d561f0a7f75b8ec06a1d2ce318c640e4c808e4188b057 WatchSource:0}: Error finding container 0698c22380c781c0a30d561f0a7f75b8ec06a1d2ce318c640e4c808e4188b057: Status 404 returned error can't find the container with id 0698c22380c781c0a30d561f0a7f75b8ec06a1d2ce318c640e4c808e4188b057
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.869022 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: I1213 06:45:38.870795 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.879290 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-bb9e97c3b8c3393faf77353d980f074a080d83c8e5cf1d2aeae707767502dddd WatchSource:0}: Error finding container bb9e97c3b8c3393faf77353d980f074a080d83c8e5cf1d2aeae707767502dddd: Status 404 returned error can't find the container with id bb9e97c3b8c3393faf77353d980f074a080d83c8e5cf1d2aeae707767502dddd
Dec 13 06:45:38 crc kubenswrapper[4644]: W1213 06:45:38.879957 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-55b10e77e9c54aad79e6451743be00d2b9a1b35cb3d719ca022e77e7a7d9372a WatchSource:0}: Error finding container 55b10e77e9c54aad79e6451743be00d2b9a1b35cb3d719ca022e77e7a7d9372a: Status 404 returned error can't find the container with id 55b10e77e9c54aad79e6451743be00d2b9a1b35cb3d719ca022e77e7a7d9372a
Dec 13 06:45:38 crc kubenswrapper[4644]: E1213 06:45:38.954630 4644 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="800ms"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.153354 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.154483 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.154531 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.154542 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.154567 4644 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 13 06:45:39 crc kubenswrapper[4644]: E1213 06:45:39.155035 4644 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.89:6443: connect: connection refused" node="crc"
Dec 13 06:45:39 crc kubenswrapper[4644]: W1213 06:45:39.238402 4644 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused
Dec 13 06:45:39 crc kubenswrapper[4644]: E1213 06:45:39.238502 4644 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.25.89:6443: connect: connection refused" logger="UnhandledError"
Dec 13 06:45:39 crc kubenswrapper[4644]: W1213 06:45:39.261670 4644 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused
Dec 13 06:45:39 crc kubenswrapper[4644]: E1213 06:45:39.261982 4644 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.25.89:6443: connect: connection refused" logger="UnhandledError"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.350285 4644 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.392892 4644 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52" exitCode=0
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.392979 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52"}
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.393088 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"55b10e77e9c54aad79e6451743be00d2b9a1b35cb3d719ca022e77e7a7d9372a"}
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.393212 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.394411 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.394457 4644 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3" exitCode=0
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.394478 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.394490 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.394539 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3"}
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.394568 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"bb9e97c3b8c3393faf77353d980f074a080d83c8e5cf1d2aeae707767502dddd"}
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.394649 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.395325 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.395361 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.395372 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.396112 4644 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68" exitCode=0
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.396148 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68"}
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.396192 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0698c22380c781c0a30d561f0a7f75b8ec06a1d2ce318c640e4c808e4188b057"}
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.396309 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.397197 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.397232 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.397256 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.398123 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe"}
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.398156 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d0770c50a76a17170a0a6164035311ded6c347a5717a485f6e357a9b269824e3"}
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.398589 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.399606 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.399630 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.399640 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.399672 4644 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="2c6ec51442662952578aad70318e2b39d2717079ddc2d103a9e4fb3187d3aedb" exitCode=0
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.399699 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"2c6ec51442662952578aad70318e2b39d2717079ddc2d103a9e4fb3187d3aedb"}
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.399718 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"18a6ab00a283cbe21735f729f94308440cbfc930cd046dcf5529fd195f4e6c08"}
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.399773 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.400363 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.400399 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.400413 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:39 crc kubenswrapper[4644]: W1213 06:45:39.481788 4644 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused
Dec 13 06:45:39 crc kubenswrapper[4644]: E1213 06:45:39.481872 4644 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.25.89:6443: connect: connection refused" logger="UnhandledError"
Dec 13 06:45:39 crc kubenswrapper[4644]: E1213 06:45:39.755822 4644 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="1.6s"
Dec 13 06:45:39 crc kubenswrapper[4644]: W1213 06:45:39.914979 4644 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.25.89:6443: connect: connection refused
Dec 13 06:45:39 crc kubenswrapper[4644]: E1213 06:45:39.915063 4644 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.25.89:6443: connect: connection refused" logger="UnhandledError"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.955396 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.957560 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.957602 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.957612 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:39 crc kubenswrapper[4644]: I1213 06:45:39.957637 4644 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 13 06:45:39 crc kubenswrapper[4644]: E1213 06:45:39.958173 4644 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.89:6443: connect: connection refused" node="crc"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.403947 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"0c0d566458ec1016632158dd447399b406bc7e4ac5dbf054980a82f18d685b31"}
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.404064 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.404944 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.404989 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.404999 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.406349 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c421321fe13141866ef6b2747c48845a897a7ea4238c16e92cbcd33fa2f787c0"}
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.406464 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"bc0d51af45093a810195e31aa1e43043ebc2c967fb2f2cc68683a6863d72e889"}
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.406483 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2f7cf602b6144f7e94214304ab850074ba871148dc0165f21cb9a8cfba8d06c9"}
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.406542 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.407227 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.407261 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.407271 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.408852 4644 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d" exitCode=0
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.408921 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d"}
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.409074 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.409870 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.409896 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.409906 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.412693 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc"}
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.412737 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638"}
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.412750 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2"}
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.412761 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334"}
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.412762 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.412771 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844"}
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.413812 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.413840 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.413851 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.414998 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec"}
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.415026 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be"}
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.415038 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d"}
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.415048 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.415653 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.415688 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:40 crc kubenswrapper[4644]: I1213 06:45:40.415699 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.421648 4644 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e" exitCode=0
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.421728 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e"}
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.421767 4644 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.421799 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.421806 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.421830 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.422689 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.422715 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.422725 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.422738 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.422766 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.422774 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.423486 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.423513 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.423524 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.558525 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.559525 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.559566 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.559576 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.559604 4644 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.637318 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.742642 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 13 06:45:41 crc kubenswrapper[4644]: I1213 06:45:41.895254 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.428021 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c"}
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.428067 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec"}
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.428080 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0"}
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.428089 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605"}
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.428093 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.428171 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.428097 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79"}
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.428534 4644 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.428560 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.428920 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.428948 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.428957 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.429004 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.429039 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.429049 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.429477 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.429503 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:42 crc kubenswrapper[4644]: I1213 06:45:42.429512 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:43 crc kubenswrapper[4644]: I1213 06:45:43.414224 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 13 06:45:43 crc kubenswrapper[4644]: I1213 06:45:43.431186 4644 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 13 06:45:43 crc kubenswrapper[4644]: I1213 06:45:43.431223 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:43 crc kubenswrapper[4644]: I1213 06:45:43.431227 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:43 crc kubenswrapper[4644]: I1213 06:45:43.431224 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:43 crc kubenswrapper[4644]: I1213 06:45:43.432134 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:43 crc kubenswrapper[4644]: I1213 06:45:43.432162 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:43 crc kubenswrapper[4644]: I1213 06:45:43.432170 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:43 crc kubenswrapper[4644]: I1213 06:45:43.432178 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:43 crc kubenswrapper[4644]: I1213 06:45:43.432134 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:43 crc kubenswrapper[4644]: I1213 06:45:43.432213 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:43 crc kubenswrapper[4644]: I1213 06:45:43.432226 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:43 crc kubenswrapper[4644]: I1213 06:45:43.432187 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:43 crc kubenswrapper[4644]: I1213 06:45:43.432180 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:43 crc kubenswrapper[4644]: I1213 06:45:43.923062 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 13 06:45:43 crc kubenswrapper[4644]: I1213 06:45:43.930060 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 13 06:45:44 crc kubenswrapper[4644]: I1213 06:45:44.433162 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:44 crc kubenswrapper[4644]: I1213 06:45:44.434140 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:44 crc kubenswrapper[4644]: I1213 06:45:44.434192 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:44 crc kubenswrapper[4644]: I1213 06:45:44.434203 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:44 crc kubenswrapper[4644]: I1213 06:45:44.896153 4644 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 13 06:45:44 crc kubenswrapper[4644]: I1213 06:45:44.896233 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 13 06:45:45 crc kubenswrapper[4644]: I1213 06:45:45.406262 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Dec 13 06:45:45 crc kubenswrapper[4644]: I1213 06:45:45.406500 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:45 crc kubenswrapper[4644]: I1213 06:45:45.407564 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:45 crc kubenswrapper[4644]: I1213 06:45:45.407595 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:45 crc kubenswrapper[4644]: I1213 06:45:45.407604 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:45 crc kubenswrapper[4644]: I1213 06:45:45.435541 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:45 crc kubenswrapper[4644]: I1213 06:45:45.436509 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:45 crc kubenswrapper[4644]: I1213 06:45:45.436543 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:45 crc kubenswrapper[4644]: I1213 06:45:45.436553 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:46 crc kubenswrapper[4644]: I1213 06:45:46.225741 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 13 06:45:46 crc kubenswrapper[4644]: I1213 06:45:46.225944 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:46 crc kubenswrapper[4644]: I1213 06:45:46.227096 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:46 crc kubenswrapper[4644]: I1213 06:45:46.227127 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:46 crc kubenswrapper[4644]: I1213 06:45:46.227138 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:46 crc kubenswrapper[4644]: I1213 06:45:46.679588 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 13 06:45:46 crc kubenswrapper[4644]: I1213 06:45:46.679775 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:46 crc kubenswrapper[4644]: I1213 06:45:46.680793 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:46 crc kubenswrapper[4644]: I1213 06:45:46.680859 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:46 crc kubenswrapper[4644]: I1213 06:45:46.680870 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:46 crc kubenswrapper[4644]: I1213 06:45:46.926119 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 13 06:45:46 crc kubenswrapper[4644]: I1213 06:45:46.926346 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:46 crc kubenswrapper[4644]: I1213 06:45:46.927424 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:46 crc kubenswrapper[4644]: I1213 06:45:46.927504 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:46 crc kubenswrapper[4644]: I1213 06:45:46.927515 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:48 crc kubenswrapper[4644]: E1213 06:45:48.452360 4644 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Dec 13 06:45:48 crc kubenswrapper[4644]: I1213 06:45:48.964931 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Dec 13 06:45:48 crc kubenswrapper[4644]: I1213 06:45:48.965117 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:48 crc kubenswrapper[4644]: I1213 06:45:48.966188 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:48 crc kubenswrapper[4644]: I1213 06:45:48.966233 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:48 crc kubenswrapper[4644]: I1213 06:45:48.966244 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:50 crc kubenswrapper[4644]: I1213 06:45:50.195367 4644 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 13 06:45:50 crc kubenswrapper[4644]: I1213 06:45:50.195434 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 13 06:45:50 crc kubenswrapper[4644]: I1213 06:45:50.199403 4644 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 13 06:45:50 crc kubenswrapper[4644]: I1213 06:45:50.199485 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 13 06:45:51 crc kubenswrapper[4644]: I1213 06:45:51.640709 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 13 06:45:51 crc kubenswrapper[4644]: I1213 06:45:51.640853 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:51 crc kubenswrapper[4644]: I1213 06:45:51.641854 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:51 crc kubenswrapper[4644]: I1213 06:45:51.641887 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:51 crc kubenswrapper[4644]: I1213 06:45:51.641898 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:53 crc kubenswrapper[4644]: I1213 06:45:53.419149 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 13 06:45:53 crc kubenswrapper[4644]: I1213 06:45:53.419308 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:53 crc kubenswrapper[4644]: I1213 06:45:53.419722 4644 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 13 06:45:53 crc kubenswrapper[4644]: I1213 06:45:53.419816 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 13 06:45:53 crc kubenswrapper[4644]: I1213 06:45:53.420533 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:53 crc kubenswrapper[4644]: I1213 06:45:53.420570 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:53 crc kubenswrapper[4644]: I1213 06:45:53.420582 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:53 crc kubenswrapper[4644]: I1213 06:45:53.422428 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 13 06:45:53 crc kubenswrapper[4644]: I1213 06:45:53.451074 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 13 06:45:53 crc kubenswrapper[4644]: I1213 06:45:53.451330 4644 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 13 06:45:53 crc kubenswrapper[4644]: I1213 06:45:53.451383 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 13 06:45:53 crc kubenswrapper[4644]: I1213 06:45:53.451893 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:45:53 crc kubenswrapper[4644]: I1213 06:45:53.451955 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:45:53 crc kubenswrapper[4644]: I1213 06:45:53.451967 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:45:54 crc kubenswrapper[4644]: I1213 06:45:54.484563 4644 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 13 06:45:54 crc kubenswrapper[4644]: I1213 06:45:54.484626 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 13 06:45:54 crc kubenswrapper[4644]: I1213 06:45:54.896290 4644 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 13 06:45:54 crc kubenswrapper[4644]: I1213 06:45:54.896366 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.187751 4644 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.187908 4644 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.187974 4644 trace.go:236] Trace[5398064]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Dec-2025 06:45:42.426) (total time: 12761ms):
Dec 13 06:45:55 crc kubenswrapper[4644]: Trace[5398064]: ---"Objects listed" error: 12760ms (06:45:55.187)
Dec 13 06:45:55 crc kubenswrapper[4644]: Trace[5398064]: [12.761090113s] [12.761090113s] END
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.187995 4644 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.189394 4644 trace.go:236] Trace[100642928]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Dec-2025 06:45:41.012) (total time: 14177ms):
Dec 13 06:45:55 crc kubenswrapper[4644]: Trace[100642928]: ---"Objects listed" error: 14177ms (06:45:55.189)
Dec 13 06:45:55 crc kubenswrapper[4644]: Trace[100642928]: [14.177116253s] [14.177116253s] END
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.189413 4644 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.189573 4644 trace.go:236] Trace[1963097382]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Dec-2025 06:45:41.754) (total time: 13434ms):
Dec 13 06:45:55 crc kubenswrapper[4644]: Trace[1963097382]: ---"Objects listed" error: 13434ms (06:45:55.189)
Dec 13 06:45:55 crc kubenswrapper[4644]: Trace[1963097382]: [13.434558144s] [13.434558144s] END
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.189617 4644 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.190767 4644 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.191398 4644 trace.go:236] Trace[1717846062]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Dec-2025 06:45:41.739) (total time: 13451ms):
Dec 13 06:45:55 crc kubenswrapper[4644]: Trace[1717846062]: ---"Objects listed" error: 13451ms (06:45:55.191)
Dec 13 06:45:55 crc kubenswrapper[4644]: Trace[1717846062]: [13.451531969s] [13.451531969s] END
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.191435 4644 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.349772 4644 apiserver.go:52] "Watching apiserver"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.352677 4644 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.352931 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"]
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.353390 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.353825 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.353851 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.353873 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.353883 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.353917 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.353929 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.353987 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.354107 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.355085 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.355311 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.355368 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.355584 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.356787 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.356941 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.357226 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.357270 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.357356 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.384674 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.395572 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.402884 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.411506 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.420712 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.428677 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.435496 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.453909 4644 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.456138 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.457830 4644 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc" exitCode=255 Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.457875 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc"} Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.466504 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.469489 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.469698 4644 scope.go:117] "RemoveContainer" containerID="b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.475396 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.487581 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.490242 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.490319 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.490344 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.490362 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.490380 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.490397 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.490412 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 13 06:45:55 crc 
kubenswrapper[4644]: I1213 06:45:55.490818 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.490426 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.490891 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.490910 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.490901 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.490931 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.490951 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.490971 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.490988 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491008 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491037 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491063 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491080 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491100 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491122 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod 
\"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491140 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491156 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491172 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491187 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491204 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491220 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491236 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491251 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491979 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491998 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod 
\"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492036 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492053 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492071 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492088 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492107 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.490904 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492192 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491061 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491184 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). 
InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492222 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492246 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492616 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492649 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492665 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492682 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492701 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492718 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492779 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492813 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492828 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492876 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492894 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492909 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492977 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492995 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493011 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493044 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493062 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493080 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: 
\"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493097 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493121 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493139 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493156 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493171 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493188 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493203 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493220 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493237 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493256 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493338 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493359 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493377 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493392 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493408 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493424 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493453 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493475 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493492 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493509 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493527 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493546 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493561 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493577 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493593 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493612 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493631 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493647 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493664 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 
06:45:55.493680 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493695 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493713 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493728 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493744 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493759 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493778 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493796 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493814 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493830 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 13 
06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493852 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494605 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494636 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494658 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494675 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494694 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494713 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494730 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494746 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494762 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494778 
4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494796 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494812 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494830 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494847 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494863 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495553 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495720 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495740 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495757 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 06:45:55 crc kubenswrapper[4644]: 
I1213 06:45:55.495775 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495795 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495812 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495829 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495846 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495864 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495883 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495899 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495917 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495935 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495954 4644 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495972 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495994 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496030 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496050 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496069 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496087 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496104 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496123 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496140 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 06:45:55 crc 
kubenswrapper[4644]: I1213 06:45:55.496158 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496176 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496194 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496211 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496229 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496247 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496268 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496286 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496306 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496325 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: 
\"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496346 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496365 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496384 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496402 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496419 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496455 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496475 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496520 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496539 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496557 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496575 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496591 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496609 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496626 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.497395 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.497431 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.497568 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.497591 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.497609 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.497748 4644 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.497777 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.497824 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.497883 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498169 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498254 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498275 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498327 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498345 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498363 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498406 4644 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498426 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498481 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498499 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498517 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498563 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498581 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498602 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498650 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498668 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498686 4644 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498737 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498908 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498933 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498983 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.499002 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.499062 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.499044 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.499177 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.499234 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.499254 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.499271 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.499668 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.499696 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: 
\"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501473 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501513 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501537 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501566 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501590 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501610 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501632 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501658 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " 
pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501759 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501787 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501811 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501829 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501888 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501901 4644 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501913 4644 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501927 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501937 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.502383 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " 
pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.504133 4644 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.507057 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491285 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491459 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491743 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491734 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491762 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491789 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.512111 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.491846 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492135 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492334 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492350 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492668 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.492864 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493034 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493206 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493233 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493272 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493487 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493595 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493662 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493749 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.493779 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494062 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494081 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494206 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494238 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.494514 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495003 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495169 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495461 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495575 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.495954 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496263 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496310 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.496423 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.497076 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.497144 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.497129 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). 
InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.497213 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.497226 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498055 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498146 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498240 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498468 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498659 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498683 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.498638 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.499087 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.499216 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.499345 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.499611 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.500334 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.500793 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). 
InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501277 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501299 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501340 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501561 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501618 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501899 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.501934 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.502461 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). 
InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.502492 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.502910 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.503230 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.503269 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.503646 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.503789 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.504155 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.504207 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.504519 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.504703 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.504721 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.505614 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.505754 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.505922 4644 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.506205 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.507300 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.507659 4644 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.508312 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.508364 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.508394 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.508549 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.509078 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.509322 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.509527 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.509767 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.510585 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.508918 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.510880 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.511047 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.511843 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.512170 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.511403 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.509567 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.512719 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.512654 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.512937 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.512964 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 06:45:56.012943567 +0000 UTC m=+18.227894400 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.513048 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-13 06:45:56.013010003 +0000 UTC m=+18.227960836 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.513137 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.513147 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.513758 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.514117 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.514795 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.515047 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.515829 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.515949 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.515970 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.515983 4644 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.516030 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 06:45:56.016008807 +0000 UTC m=+18.230959640 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.516065 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.516075 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.516082 4644 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.516103 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 06:45:56.016096863 +0000 UTC m=+18.231047696 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.516230 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.516529 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: E1213 06:45:55.516612 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:45:56.016601308 +0000 UTC m=+18.231552141 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.516842 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.516812 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.516860 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.517090 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.517489 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.517564 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.517754 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.517948 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.517990 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.518106 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.518881 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.518953 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.519284 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.519579 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.519754 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.520059 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.520160 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.520180 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.520707 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.520749 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.520821 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.521046 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.521344 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.521494 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.521628 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.521694 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.521879 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.521982 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.527557 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.522069 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.527754 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.527766 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.527776 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.527794 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.527826 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.527845 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). 
InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.528363 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.528629 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.529782 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.529803 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.530169 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.530372 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.530391 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.530378 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.530547 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.530698 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.530721 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.530812 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.530996 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.531339 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.531582 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.531630 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.533119 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.533761 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.533933 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.534079 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.534152 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.534465 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.534491 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.534502 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.534532 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.534574 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.534592 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.534608 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.534660 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.535158 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.535177 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.535187 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.535681 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.535837 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.535877 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.536210 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.536364 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.536544 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.536978 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.537643 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.538474 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.538605 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.538833 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.539328 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.539712 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.539776 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.539789 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.540272 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.540622 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.541472 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.541652 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.542140 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.542246 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.554674 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.559627 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.563327 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.567452 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602405 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602487 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602551 4644 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602563 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602574 4644 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602583 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602592 4644 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602602 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602611 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602620 4644 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602629 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602637 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node 
\"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602647 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602655 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602664 4644 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602674 4644 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602684 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602696 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602705 4644 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602713 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602722 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602731 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602739 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602747 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602756 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 13 
06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602766 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602778 4644 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602787 4644 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602796 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602805 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602814 4644 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602823 4644 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602831 4644 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602835 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602811 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602839 4644 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602908 4644 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602955 4644 
reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602968 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602983 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.602995 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603042 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603056 4644 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603067 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603099 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603111 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603123 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603134 4644 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603146 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603157 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603188 4644 
reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603200 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603210 4644 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603220 4644 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603230 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603258 4644 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603270 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603280 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603291 4644 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603302 4644 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603312 4644 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603322 4644 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603332 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603343 4644 reconciler_common.go:293] "Volume detached for volume 
\"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603355 4644 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603367 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603377 4644 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603387 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603398 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603408 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603418 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603428 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603452 4644 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603465 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603476 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603488 4644 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603500 4644 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603511 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603522 4644 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603532 4644 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603542 4644 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603552 4644 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603563 4644 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603604 4644 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603614 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603622 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603634 4644 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603643 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603653 4644 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603663 4644 reconciler_common.go:293] "Volume 
detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603671 4644 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603680 4644 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603689 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603699 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603707 4644 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603716 4644 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603724 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603733 4644 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603741 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603750 4644 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603759 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603768 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603779 4644 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603790 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603798 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603807 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603816 4644 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603825 4644 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603833 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603842 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603850 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603859 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603867 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603876 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603885 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603894 4644 reconciler_common.go:293] 
"Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603902 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603911 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603920 4644 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603929 4644 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603937 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603946 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603954 4644 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603962 4644 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603971 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603980 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603988 4644 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.603998 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604010 4644 reconciler_common.go:293] "Volume detached 
for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604043 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604051 4644 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604060 4644 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604067 4644 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604075 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604083 4644 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604091 4644 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604099 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604107 4644 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604116 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604125 4644 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604133 4644 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604141 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604148 4644 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604156 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604165 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604173 4644 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604182 4644 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604191 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604200 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604208 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604216 4644 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604224 4644 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604233 4644 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604241 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604248 4644 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604258 4644 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604266 4644 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604274 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604282 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604291 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604300 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604310 4644 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604318 4644 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604327 4644 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604336 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604344 4644 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604352 4644 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604360 4644 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: 
\"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604369 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604378 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604387 4644 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604396 4644 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604405 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604414 4644 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604422 4644 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604430 4644 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604437 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604457 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604465 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604473 4644 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604483 4644 reconciler_common.go:293] "Volume detached for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604491 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604498 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604507 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604515 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604524 4644 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604532 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604541 4644 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604549 4644 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604558 4644 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604566 4644 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604574 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.604584 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 
06:45:55.665479 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.672363 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 06:45:55 crc kubenswrapper[4644]: I1213 06:45:55.684332 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.108893 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.108972 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.109000 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.109040 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:45:56 crc kubenswrapper[4644]: E1213 06:45:56.109077 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:45:57.109043696 +0000 UTC m=+19.323994530 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:45:56 crc kubenswrapper[4644]: E1213 06:45:56.109109 4644 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.109136 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:45:56 crc kubenswrapper[4644]: E1213 06:45:56.109168 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 06:45:57.109154095 +0000 UTC m=+19.324104928 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 06:45:56 crc kubenswrapper[4644]: E1213 06:45:56.109261 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 06:45:56 crc kubenswrapper[4644]: E1213 06:45:56.109308 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 06:45:56 crc kubenswrapper[4644]: E1213 06:45:56.109323 4644 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:45:56 crc kubenswrapper[4644]: E1213 06:45:56.109279 4644 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 06:45:56 crc kubenswrapper[4644]: E1213 06:45:56.109396 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 06:45:57.109369242 +0000 UTC m=+19.324320075 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:45:56 crc kubenswrapper[4644]: E1213 06:45:56.109429 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 06:45:57.109418165 +0000 UTC m=+19.324369008 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 06:45:56 crc kubenswrapper[4644]: E1213 06:45:56.109479 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 06:45:56 crc kubenswrapper[4644]: E1213 06:45:56.109495 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 06:45:56 crc kubenswrapper[4644]: E1213 06:45:56.109508 4644 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:45:56 crc kubenswrapper[4644]: E1213 06:45:56.109547 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 06:45:57.109535266 +0000 UTC m=+19.324486109 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.392222 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.392863 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.393484 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.394107 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.394623 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.395095 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.395606 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.396088 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.396647 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.397108 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.397572 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.398164 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.398637 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.399088 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.399560 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.400014 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.400504 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.400871 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.404086 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.404597 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.405365 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.405874 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.406273 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.407930 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.408329 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.409242 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.409805 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" 
path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.410583 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.411089 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.415641 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.416066 4644 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.416161 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.417988 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.418456 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.418842 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.420171 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.421020 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.421485 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.422352 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.422988 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.423743 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" 
path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.424265 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.425149 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.425703 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.426434 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.426926 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.427762 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.428381 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.429121 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.429545 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.430302 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.430769 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.431260 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.432047 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.462136 4644 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.464247 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7"} Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.464337 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.465655 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"f1e9ee5da52d8231e74e7684d84e837be350a4b1d9b4392df3edeaa079c21d7d"} Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.467059 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc"} Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.467090 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a"} Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.467101 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"62271eab024e0718aaee4c2ec84e4c0dfca396404c771a112e05c0afc6d0998c"} Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.468816 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957"} Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.468842 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"6c978ed7c3751f078ac1029a05d3d9be669dad90b71138d8a2831dffbda37ed2"} Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.477299 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.492128 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.505326 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.519369 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.530490 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.541637 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.556188 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.569683 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.581090 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.600379 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.613464 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.624926 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.637580 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.649797 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.651942 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-wvvsb"] Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.652249 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-lbk25"] Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.652427 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-wvvsb" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.652500 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-lbk25" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.654782 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.655090 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.655234 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.655354 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.655815 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.655850 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.656424 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.656474 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.668191 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.684040 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.706378 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.713723 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-run-k8s-cni-cncf-io\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.713757 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-hostroot\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.713777 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-etc-kubernetes\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.713801 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-os-release\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.713837 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-var-lib-cni-bin\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.713851 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-multus-daemon-config\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.713899 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-run-multus-certs\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.713918 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-system-cni-dir\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.713934 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-cni-binary-copy\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.713952 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-var-lib-kubelet\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.713971 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-multus-conf-dir\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.713989 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2z58d\" (UniqueName: \"kubernetes.io/projected/bc018066-67ca-4e62-a670-18812ca830a7-kube-api-access-2z58d\") pod \"node-resolver-wvvsb\" (UID: \"bc018066-67ca-4e62-a670-18812ca830a7\") " pod="openshift-dns/node-resolver-wvvsb"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.714017 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-run-netns\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.714047 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-var-lib-cni-multus\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.714071 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-cnibin\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.714088 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4t56\" (UniqueName: \"kubernetes.io/projected/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-kube-api-access-c4t56\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.714105 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/bc018066-67ca-4e62-a670-18812ca830a7-hosts-file\") pod \"node-resolver-wvvsb\" (UID: \"bc018066-67ca-4e62-a670-18812ca830a7\") " pod="openshift-dns/node-resolver-wvvsb"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.714130 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-multus-socket-dir-parent\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.714148 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-multus-cni-dir\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.725486 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.740749 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.752716 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.766273 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.777634 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.791952 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.806663 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815189 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-multus-socket-dir-parent\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815227 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-multus-cni-dir\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815256 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-run-k8s-cni-cncf-io\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815272 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-hostroot\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815293 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-etc-kubernetes\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815321 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-os-release\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815338 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-var-lib-cni-bin\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815353 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-multus-daemon-config\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815368 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-run-multus-certs\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815385 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-system-cni-dir\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815400 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-cni-binary-copy\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815417 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-var-lib-kubelet\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815416 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-run-k8s-cni-cncf-io\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815471 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-hostroot\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815497 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-multus-conf-dir\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815493 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-etc-kubernetes\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815471 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-var-lib-cni-bin\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815489 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-multus-socket-dir-parent\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815541 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-run-multus-certs\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815436 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-multus-conf-dir\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815540 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-multus-cni-dir\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815643 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-os-release\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815694 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-system-cni-dir\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815707 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-var-lib-kubelet\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815738 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2z58d\" (UniqueName: \"kubernetes.io/projected/bc018066-67ca-4e62-a670-18812ca830a7-kube-api-access-2z58d\") pod \"node-resolver-wvvsb\" (UID: \"bc018066-67ca-4e62-a670-18812ca830a7\") " pod="openshift-dns/node-resolver-wvvsb"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815844 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-run-netns\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815878 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-var-lib-cni-multus\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815900 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-run-netns\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815920 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-host-var-lib-cni-multus\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815959 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-cnibin\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815979 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4t56\" (UniqueName: \"kubernetes.io/projected/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-kube-api-access-c4t56\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.815996 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/bc018066-67ca-4e62-a670-18812ca830a7-hosts-file\") pod \"node-resolver-wvvsb\" (UID: \"bc018066-67ca-4e62-a670-18812ca830a7\") " pod="openshift-dns/node-resolver-wvvsb"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.816066 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/bc018066-67ca-4e62-a670-18812ca830a7-hosts-file\") pod \"node-resolver-wvvsb\" (UID: \"bc018066-67ca-4e62-a670-18812ca830a7\") " pod="openshift-dns/node-resolver-wvvsb"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.816076 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-cnibin\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.816154 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-multus-daemon-config\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.816241 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-cni-binary-copy\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.816610 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.825637 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.835487 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4t56\" (UniqueName: \"kubernetes.io/projected/9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd-kube-api-access-c4t56\") pod \"multus-lbk25\" (UID: \"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\") " pod="openshift-multus/multus-lbk25"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.835511 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2z58d\" (UniqueName: \"kubernetes.io/projected/bc018066-67ca-4e62-a670-18812ca830a7-kube-api-access-2z58d\") pod \"node-resolver-wvvsb\" (UID: \"bc018066-67ca-4e62-a670-18812ca830a7\") " pod="openshift-dns/node-resolver-wvvsb"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.837762 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.848835 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.858050 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z"
Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.867737 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.876994 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.891959 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:56Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.964493 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-wvvsb" Dec 13 06:45:56 crc kubenswrapper[4644]: I1213 06:45:56.970005 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-lbk25" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.042328 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-bj6c2"] Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.042930 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-45tj4"] Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.043190 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.043305 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.044688 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-ncsgn"] Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.045404 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.049113 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.049299 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.049430 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.049160 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.049170 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.049177 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.049588 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.049202 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.049952 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.050010 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.050113 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.050174 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.050318 4644 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.050421 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.066057 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.080572 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.094656 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.110098 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119324 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119435 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:45:57 crc kubenswrapper[4644]: E1213 06:45:57.119497 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:45:59.119474449 +0000 UTC m=+21.334425282 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119529 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/48240f19-087e-4597-b448-ab1a190a5027-rootfs\") pod \"machine-config-daemon-45tj4\" (UID: \"48240f19-087e-4597-b448-ab1a190a5027\") " pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119558 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-etc-openvswitch\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119576 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovnkube-script-lib\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: E1213 06:45:57.119589 4644 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119599 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b15a4861-38b1-4144-b0ae-7a079a389221-tuning-conf-dir\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: E1213 06:45:57.119669 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 06:45:59.119649731 +0000 UTC m=+21.334600564 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119692 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119719 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-cni-bin\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119734 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b15a4861-38b1-4144-b0ae-7a079a389221-os-release\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119756 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-openvswitch\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119776 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-systemd-units\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119793 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovnkube-config\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: E1213 06:45:57.119805 4644 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119811 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-kubelet\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119825 4644 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/48240f19-087e-4597-b448-ab1a190a5027-proxy-tls\") pod \"machine-config-daemon-45tj4\" (UID: \"48240f19-087e-4597-b448-ab1a190a5027\") " pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 06:45:57 crc kubenswrapper[4644]: E1213 06:45:57.119845 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 06:45:59.119836605 +0000 UTC m=+21.334787438 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119862 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-slash\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119881 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-ovn\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119896 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-env-overrides\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119911 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlbwf\" (UniqueName: \"kubernetes.io/projected/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-kube-api-access-zlbwf\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119928 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b15a4861-38b1-4144-b0ae-7a079a389221-cnibin\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119945 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-systemd\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119969 4644 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b15a4861-38b1-4144-b0ae-7a079a389221-cni-binary-copy\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.119990 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-node-log\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.120007 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/48240f19-087e-4597-b448-ab1a190a5027-mcd-auth-proxy-config\") pod \"machine-config-daemon-45tj4\" (UID: \"48240f19-087e-4597-b448-ab1a190a5027\") " pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.120024 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-run-ovn-kubernetes\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.120058 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.120078 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovn-node-metrics-cert\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.120093 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b15a4861-38b1-4144-b0ae-7a079a389221-system-cni-dir\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.120112 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-run-netns\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.120126 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-log-socket\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.120141 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b15a4861-38b1-4144-b0ae-7a079a389221-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.120170 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.120193 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-var-lib-openvswitch\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.120215 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xxg7\" (UniqueName: \"kubernetes.io/projected/48240f19-087e-4597-b448-ab1a190a5027-kube-api-access-9xxg7\") pod \"machine-config-daemon-45tj4\" (UID: \"48240f19-087e-4597-b448-ab1a190a5027\") " pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.120239 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-cni-netd\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.120259 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.120276 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c446z\" (UniqueName: \"kubernetes.io/projected/b15a4861-38b1-4144-b0ae-7a079a389221-kube-api-access-c446z\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: E1213 06:45:57.120320 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 06:45:57 crc kubenswrapper[4644]: E1213 06:45:57.120337 4644 projected.go:288] Couldn't get configMap 
openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 06:45:57 crc kubenswrapper[4644]: E1213 06:45:57.120349 4644 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:45:57 crc kubenswrapper[4644]: E1213 06:45:57.120369 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 06:45:57 crc kubenswrapper[4644]: E1213 06:45:57.120381 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 06:45:57 crc kubenswrapper[4644]: E1213 06:45:57.120392 4644 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:45:57 crc kubenswrapper[4644]: E1213 06:45:57.120418 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 06:45:59.12041015 +0000 UTC m=+21.335360983 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:45:57 crc kubenswrapper[4644]: E1213 06:45:57.120436 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 06:45:59.120423305 +0000 UTC m=+21.335374138 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.123105 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.134834 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.144833 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.153901 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.163174 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.175031 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.184521 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.198100 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: 
I1213 06:45:57.208973 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\
"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restar
tCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.220120 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221600 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-var-lib-openvswitch\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221646 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xxg7\" (UniqueName: \"kubernetes.io/projected/48240f19-087e-4597-b448-ab1a190a5027-kube-api-access-9xxg7\") pod \"machine-config-daemon-45tj4\" (UID: \"48240f19-087e-4597-b448-ab1a190a5027\") " pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221663 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-cni-netd\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221684 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c446z\" (UniqueName: \"kubernetes.io/projected/b15a4861-38b1-4144-b0ae-7a079a389221-kube-api-access-c446z\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221706 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/48240f19-087e-4597-b448-ab1a190a5027-rootfs\") pod \"machine-config-daemon-45tj4\" (UID: \"48240f19-087e-4597-b448-ab1a190a5027\") " pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221722 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-etc-openvswitch\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 
06:45:57.221737 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovnkube-script-lib\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221751 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b15a4861-38b1-4144-b0ae-7a079a389221-tuning-conf-dir\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221768 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-cni-bin\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221782 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b15a4861-38b1-4144-b0ae-7a079a389221-os-release\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221805 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-openvswitch\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221819 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-systemd-units\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221831 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovnkube-config\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221844 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/48240f19-087e-4597-b448-ab1a190a5027-proxy-tls\") pod \"machine-config-daemon-45tj4\" (UID: \"48240f19-087e-4597-b448-ab1a190a5027\") " pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221861 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-kubelet\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221877 4644 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-slash\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221891 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-ovn\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221904 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-env-overrides\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221917 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlbwf\" (UniqueName: \"kubernetes.io/projected/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-kube-api-access-zlbwf\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221931 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b15a4861-38b1-4144-b0ae-7a079a389221-cnibin\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221944 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b15a4861-38b1-4144-b0ae-7a079a389221-cni-binary-copy\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221960 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-systemd\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.221974 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-node-log\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.222001 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/48240f19-087e-4597-b448-ab1a190a5027-mcd-auth-proxy-config\") pod \"machine-config-daemon-45tj4\" (UID: \"48240f19-087e-4597-b448-ab1a190a5027\") " pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.222020 4644 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-run-ovn-kubernetes\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.222045 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.222061 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovn-node-metrics-cert\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.222076 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b15a4861-38b1-4144-b0ae-7a079a389221-system-cni-dir\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.222090 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-run-netns\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.222103 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-log-socket\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.222117 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b15a4861-38b1-4144-b0ae-7a079a389221-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.222751 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b15a4861-38b1-4144-b0ae-7a079a389221-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.222803 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-var-lib-openvswitch\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc 
kubenswrapper[4644]: I1213 06:45:57.222991 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-cni-netd\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.223151 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/48240f19-087e-4597-b448-ab1a190a5027-rootfs\") pod \"machine-config-daemon-45tj4\" (UID: \"48240f19-087e-4597-b448-ab1a190a5027\") " pod="openshift-machine-config-operator/machine-config-daemon-45tj4"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.223183 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-etc-openvswitch\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.223676 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovnkube-script-lib\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.223748 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b15a4861-38b1-4144-b0ae-7a079a389221-tuning-conf-dir\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.223779 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-cni-bin\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.223987 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b15a4861-38b1-4144-b0ae-7a079a389221-os-release\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.224068 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-openvswitch\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.224167 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-systemd-units\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.224493 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-systemd\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.224772 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovnkube-config\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.224834 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-node-log\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.225075 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-slash\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.225138 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-kubelet\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.225131 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b15a4861-38b1-4144-b0ae-7a079a389221-cnibin\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.225216 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-run-ovn-kubernetes\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.225232 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-ovn\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.225275 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.225308 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-run-netns\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.225336 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b15a4861-38b1-4144-b0ae-7a079a389221-system-cni-dir\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.225368 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-log-socket\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.225397 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/48240f19-087e-4597-b448-ab1a190a5027-mcd-auth-proxy-config\") pod \"machine-config-daemon-45tj4\" (UID: \"48240f19-087e-4597-b448-ab1a190a5027\") " pod="openshift-machine-config-operator/machine-config-daemon-45tj4"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.225724 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b15a4861-38b1-4144-b0ae-7a079a389221-cni-binary-copy\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.226164 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-env-overrides\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.231857 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/48240f19-087e-4597-b448-ab1a190a5027-proxy-tls\") pod \"machine-config-daemon-45tj4\" (UID: \"48240f19-087e-4597-b448-ab1a190a5027\") " pod="openshift-machine-config-operator/machine-config-daemon-45tj4"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.231888 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovn-node-metrics-cert\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2"
Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.235815 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.237655 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xxg7\" (UniqueName: \"kubernetes.io/projected/48240f19-087e-4597-b448-ab1a190a5027-kube-api-access-9xxg7\") pod \"machine-config-daemon-45tj4\" (UID: \"48240f19-087e-4597-b448-ab1a190a5027\") " pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.237665 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c446z\" (UniqueName: \"kubernetes.io/projected/b15a4861-38b1-4144-b0ae-7a079a389221-kube-api-access-c446z\") pod \"multus-additional-cni-plugins-ncsgn\" (UID: \"b15a4861-38b1-4144-b0ae-7a079a389221\") " pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.240337 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlbwf\" (UniqueName: \"kubernetes.io/projected/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-kube-api-access-zlbwf\") pod \"ovnkube-node-bj6c2\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.245571 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.254768 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.263260 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.273879 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.283969 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.292980 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.304655 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.356479 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.363396 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:45:57 crc kubenswrapper[4644]: W1213 06:45:57.366662 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod48240f19_087e_4597_b448_ab1a190a5027.slice/crio-54d61becddfc7007e60b949812e26f418dc16971f2396c1e1142ee0f005c6a3f WatchSource:0}: Error finding container 54d61becddfc7007e60b949812e26f418dc16971f2396c1e1142ee0f005c6a3f: Status 404 returned error can't find the container with id 54d61becddfc7007e60b949812e26f418dc16971f2396c1e1142ee0f005c6a3f Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.369410 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.389071 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.389159 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.389186 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:45:57 crc kubenswrapper[4644]: E1213 06:45:57.389214 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:45:57 crc kubenswrapper[4644]: E1213 06:45:57.389424 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:45:57 crc kubenswrapper[4644]: E1213 06:45:57.389350 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:45:57 crc kubenswrapper[4644]: W1213 06:45:57.390585 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb15a4861_38b1_4144_b0ae_7a079a389221.slice/crio-1433649ba91cb1f5db9f90dc2ea6ddd31afb1421207cdbbbd2352b39b57b0981 WatchSource:0}: Error finding container 1433649ba91cb1f5db9f90dc2ea6ddd31afb1421207cdbbbd2352b39b57b0981: Status 404 returned error can't find the container with id 1433649ba91cb1f5db9f90dc2ea6ddd31afb1421207cdbbbd2352b39b57b0981 Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.476151 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-wvvsb" event={"ID":"bc018066-67ca-4e62-a670-18812ca830a7","Type":"ContainerStarted","Data":"a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d"} Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.476222 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-wvvsb" event={"ID":"bc018066-67ca-4e62-a670-18812ca830a7","Type":"ContainerStarted","Data":"6d150ca31bdc352e53140bf28d554dc0032d6032c3eabf3fb4a334216d99678a"} Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.478352 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" event={"ID":"b15a4861-38b1-4144-b0ae-7a079a389221","Type":"ContainerStarted","Data":"1433649ba91cb1f5db9f90dc2ea6ddd31afb1421207cdbbbd2352b39b57b0981"} Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.479994 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerStarted","Data":"82d18638f84e8a5c89b2416b7b117d7bb0fa410cf86bd295adb83354d34550c0"} Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.481209 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerStarted","Data":"54d61becddfc7007e60b949812e26f418dc16971f2396c1e1142ee0f005c6a3f"} Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.483147 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lbk25" event={"ID":"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd","Type":"ContainerStarted","Data":"f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43"} Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.483174 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lbk25" event={"ID":"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd","Type":"ContainerStarted","Data":"81d8ef980d910ec78787d3c8162d977c8c7cfcb25360e4cb8130ea308ea50371"} Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.499256 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.523307 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: 
I1213 06:45:57.536368 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\
"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restar
tCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.547407 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.557976 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.567352 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.574777 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.583659 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.596708 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.609941 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.619565 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.653997 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.695958 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.735196 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.778495 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: 
I1213 06:45:57.812790 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.852950 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.891778 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.933090 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:57 crc kubenswrapper[4644]: I1213 06:45:57.971699 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:57Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.014992 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.054128 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.093530 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.132853 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.390880 4644 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.392411 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.392459 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.392470 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.392588 4644 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.399475 4644 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.399688 4644 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.405030 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.405227 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.405309 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.405195 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.405388 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.405549 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:58Z","lastTransitionTime":"2025-12-13T06:45:58Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.419884 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc47
8274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: E1213 06:45:58.428160 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.430157 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.431453 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.431509 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.431520 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:58 crc kubenswrapper[4644]: 
I1213 06:45:58.431536 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.431548 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:58Z","lastTransitionTime":"2025-12-13T06:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.439365 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
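The NotReady condition recorded just above comes from the container runtime rather than the webhook: the runtime reports NetworkReady=false until a CNI network configuration appears in /etc/kubernetes/cni/net.d/, and at this point OVN-Kubernetes has not yet written one. As a rough sketch of that readiness test, assuming only that the runtime accepts the conventional .conf, .conflist, and .json file names in that directory:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether dir contains at least one CNI network
// configuration file. This approximates the runtime's check, assuming
// it accepts the conventional .conf, .conflist and .json names.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Println("NetworkReady=false:", err)
		return
	}
	if !ok {
		fmt.Println("NetworkReady=false: no CNI configuration file in /etc/kubernetes/cni/net.d/")
		return
	}
	fmt.Println("NetworkReady=true")
}

Once the ovnkube-node pod below finishes initializing and writes its configuration, this check flips and the Ready condition follows on the next sync.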
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: E1213 06:45:58.441874 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.446987 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.447110 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
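The payloads the kubelet keeps trying to send are strategic-merge patches: the $setElementOrder/conditions key pins the ordering of the conditions list, while each listed condition carries only the fields that changed. The kubelet derives these by diffing old and new status with the strategic-merge-patch utilities in apimachinery; the following is a hand-built sketch of the same patch shape using only encoding/json, with illustrative values taken from the log.

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Hand-built illustration of the patch shape seen in the log:
	// $setElementOrder fixes the order of the conditions list, and each
	// listed condition carries only the fields that changed.
	patch := map[string]interface{}{
		"status": map[string]interface{}{
			"$setElementOrder/conditions": []map[string]string{
				{"type": "MemoryPressure"},
				{"type": "DiskPressure"},
				{"type": "PIDPressure"},
				{"type": "Ready"},
			},
			"conditions": []map[string]string{
				{
					"type":               "Ready",
					"status":             "False",
					"reason":             "KubeletNotReady",
					"lastHeartbeatTime":  "2025-12-13T06:45:58Z",
					"lastTransitionTime": "2025-12-13T06:45:58Z",
				},
			},
		},
	}
	out, err := json.Marshal(patch)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
	// A real client would submit this to the API server with content type
	// application/strategic-merge-patch+json.
}
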
event="NodeHasNoDiskPressure" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.447185 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.447263 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.447319 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:58Z","lastTransitionTime":"2025-12-13T06:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.453596 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: E1213 06:45:58.458386 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.464077 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.464108 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.464118 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.464135 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.464147 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:58Z","lastTransitionTime":"2025-12-13T06:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.466186 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: E1213 06:45:58.474114 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b
4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.479431 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.479495 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.479503 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.479518 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.479548 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:58Z","lastTransitionTime":"2025-12-13T06:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.479997 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.487067 4644 generic.go:334] "Generic (PLEG): container finished" podID="b15a4861-38b1-4144-b0ae-7a079a389221" containerID="08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23" exitCode=0 Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.487155 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-additional-cni-plugins-ncsgn" event={"ID":"b15a4861-38b1-4144-b0ae-7a079a389221","Type":"ContainerDied","Data":"08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23"} Dec 13 06:45:58 crc kubenswrapper[4644]: E1213 06:45:58.489842 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: E1213 06:45:58.490153 4644 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.492543 4644 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.493457 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerStarted","Data":"6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c"} Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.493485 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerStarted","Data":"cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6"} Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.495694 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.495737 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.495748 4644 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.495771 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.495782 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:58Z","lastTransitionTime":"2025-12-13T06:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.500797 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233"} Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.503355 4644 generic.go:334] "Generic (PLEG): container finished" podID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerID="64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7" exitCode=0 Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.503389 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerDied","Data":"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7"} Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.534179 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.572668 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.598408 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.598504 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.598518 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.598533 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.598542 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:58Z","lastTransitionTime":"2025-12-13T06:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.612697 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.653079 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.697996 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.701270 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.701319 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.701330 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.701346 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.701354 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:58Z","lastTransitionTime":"2025-12-13T06:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.734580 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets
/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.773533 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.807050 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.807107 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.807116 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.807133 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.807142 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:58Z","lastTransitionTime":"2025-12-13T06:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.814960 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.853631 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.893545 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.909193 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.909235 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.909246 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.909264 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.909274 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:58Z","lastTransitionTime":"2025-12-13T06:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.934395 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.971972 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.987623 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 13 06:45:58 crc kubenswrapper[4644]: I1213 06:45:58.997785 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.011710 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.011747 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.011756 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.011770 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.011780 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:59Z","lastTransitionTime":"2025-12-13T06:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.014401 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.035014 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.073397 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"moun
tPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.114569 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.114617 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.114625 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.114643 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.114653 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:59Z","lastTransitionTime":"2025-12-13T06:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.115526 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.138181 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.138287 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:45:59 crc kubenswrapper[4644]: 
I1213 06:45:59.138315 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.138339 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.138362 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:45:59 crc kubenswrapper[4644]: E1213 06:45:59.138424 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:46:03.138394694 +0000 UTC m=+25.353345527 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:45:59 crc kubenswrapper[4644]: E1213 06:45:59.138489 4644 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 06:45:59 crc kubenswrapper[4644]: E1213 06:45:59.138532 4644 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 06:45:59 crc kubenswrapper[4644]: E1213 06:45:59.138546 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 06:46:03.138531493 +0000 UTC m=+25.353482326 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 06:45:59 crc kubenswrapper[4644]: E1213 06:45:59.138547 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 06:45:59 crc kubenswrapper[4644]: E1213 06:45:59.138581 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 06:45:59 crc kubenswrapper[4644]: E1213 06:45:59.138581 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 06:46:03.138568523 +0000 UTC m=+25.353519357 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 06:45:59 crc kubenswrapper[4644]: E1213 06:45:59.138596 4644 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:45:59 crc kubenswrapper[4644]: E1213 06:45:59.138670 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 06:46:03.138645158 +0000 UTC m=+25.353595991 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:45:59 crc kubenswrapper[4644]: E1213 06:45:59.138746 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 06:45:59 crc kubenswrapper[4644]: E1213 06:45:59.138757 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 06:45:59 crc kubenswrapper[4644]: E1213 06:45:59.138766 4644 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:45:59 crc kubenswrapper[4644]: E1213 06:45:59.138806 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 06:46:03.138798047 +0000 UTC m=+25.353748870 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.153209 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.193308 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.216966 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.217004 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.217012 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.217028 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.217040 4644 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:59Z","lastTransitionTime":"2025-12-13T06:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.233606 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.272200 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.273094 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-6lkrr"] Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.273568 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-6lkrr" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.306285 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.318768 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.318816 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.318826 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.318842 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.318854 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:59Z","lastTransitionTime":"2025-12-13T06:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.326010 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.340343 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/5997b569-2b35-4c1c-bcdd-2d89d9beeefe-serviceca\") pod \"node-ca-6lkrr\" (UID: \"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\") " pod="openshift-image-registry/node-ca-6lkrr" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.340409 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5997b569-2b35-4c1c-bcdd-2d89d9beeefe-host\") pod \"node-ca-6lkrr\" (UID: \"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\") " pod="openshift-image-registry/node-ca-6lkrr" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.340618 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7blsz\" (UniqueName: \"kubernetes.io/projected/5997b569-2b35-4c1c-bcdd-2d89d9beeefe-kube-api-access-7blsz\") pod \"node-ca-6lkrr\" (UID: \"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\") " pod="openshift-image-registry/node-ca-6lkrr" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.346826 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.366099 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.388482 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.388522 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.388502 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:45:59 crc kubenswrapper[4644]: E1213 06:45:59.388636 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:45:59 crc kubenswrapper[4644]: E1213 06:45:59.388739 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:45:59 crc kubenswrapper[4644]: E1213 06:45:59.388800 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.393153 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.421681 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.421727 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.421735 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.421787 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.421801 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:59Z","lastTransitionTime":"2025-12-13T06:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.434238 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.441660 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7blsz\" (UniqueName: \"kubernetes.io/projected/5997b569-2b35-4c1c-bcdd-2d89d9beeefe-kube-api-access-7blsz\") pod \"node-ca-6lkrr\" (UID: \"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\") " pod="openshift-image-registry/node-ca-6lkrr" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.441704 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/5997b569-2b35-4c1c-bcdd-2d89d9beeefe-serviceca\") pod \"node-ca-6lkrr\" (UID: \"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\") " pod="openshift-image-registry/node-ca-6lkrr" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.441731 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5997b569-2b35-4c1c-bcdd-2d89d9beeefe-host\") pod \"node-ca-6lkrr\" (UID: \"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\") " pod="openshift-image-registry/node-ca-6lkrr" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.441806 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5997b569-2b35-4c1c-bcdd-2d89d9beeefe-host\") pod \"node-ca-6lkrr\" (UID: \"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\") " pod="openshift-image-registry/node-ca-6lkrr" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.442680 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/5997b569-2b35-4c1c-bcdd-2d89d9beeefe-serviceca\") pod \"node-ca-6lkrr\" (UID: \"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\") " pod="openshift-image-registry/node-ca-6lkrr" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.480672 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7blsz\" (UniqueName: \"kubernetes.io/projected/5997b569-2b35-4c1c-bcdd-2d89d9beeefe-kube-api-access-7blsz\") pod \"node-ca-6lkrr\" (UID: \"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\") " pod="openshift-image-registry/node-ca-6lkrr" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.496628 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z"
Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.507906    4644 generic.go:334] "Generic (PLEG): container finished" podID="b15a4861-38b1-4144-b0ae-7a079a389221" containerID="71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c" exitCode=0
Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.507956    4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" event={"ID":"b15a4861-38b1-4144-b0ae-7a079a389221","Type":"ContainerDied","Data":"71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c"}
Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.512673    4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerStarted","Data":"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec"}
Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.512716    4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerStarted","Data":"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb"}
Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.512734    4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerStarted","Data":"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905"}
Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.512743    4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerStarted","Data":"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203"}
Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.512752    4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerStarted","Data":"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f"}
Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.512760    4644 kubelet.go:2453] "SyncLoop (PLEG):
event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerStarted","Data":"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a"} Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.523792 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.523833 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.523844 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.523860 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.523870 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:59Z","lastTransitionTime":"2025-12-13T06:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.537601 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc 
kubenswrapper[4644]: I1213 06:45:59.573134 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.584280 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-6lkrr" Dec 13 06:45:59 crc kubenswrapper[4644]: W1213 06:45:59.599139 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5997b569_2b35_4c1c_bcdd_2d89d9beeefe.slice/crio-552a259a08649dda22676221b7b909f65cde5da7a2895ef504d7221134500034 WatchSource:0}: Error finding container 552a259a08649dda22676221b7b909f65cde5da7a2895ef504d7221134500034: Status 404 returned error can't find the container with id 552a259a08649dda22676221b7b909f65cde5da7a2895ef504d7221134500034 Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.614379 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\
"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.627698 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.627727 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.627737 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.627771 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.627781 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:59Z","lastTransitionTime":"2025-12-13T06:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.653806 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.697905 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.730825 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.730872 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.730882 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.730900 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.730934 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:59Z","lastTransitionTime":"2025-12-13T06:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.733153 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.772326 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.817558 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124
d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.833390 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.833425 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.833433 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.833465 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.833478 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:59Z","lastTransitionTime":"2025-12-13T06:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.852678 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.892429 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.930713 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.935508 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.935546 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.935557 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.935572 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.935582 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:45:59Z","lastTransitionTime":"2025-12-13T06:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:45:59 crc kubenswrapper[4644]: I1213 06:45:59.973488 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:45:59Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.012265 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.037944 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.037990 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.038001 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.038020 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.038038 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:00Z","lastTransitionTime":"2025-12-13T06:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.056253 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.092366 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.131870 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.140603 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.140653 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.140664 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.140744 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.140757 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:00Z","lastTransitionTime":"2025-12-13T06:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.175389 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5
618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.214086 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.242914 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.242945 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.242956 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.242969 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.242981 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:00Z","lastTransitionTime":"2025-12-13T06:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.252970 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.293018 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"
},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.339838 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},
\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.345360 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.345402 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.345413 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.345430 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.345454 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:00Z","lastTransitionTime":"2025-12-13T06:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.447421 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.447487 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.447497 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.447512 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.447522 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:00Z","lastTransitionTime":"2025-12-13T06:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.517259 4644 generic.go:334] "Generic (PLEG): container finished" podID="b15a4861-38b1-4144-b0ae-7a079a389221" containerID="d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d" exitCode=0 Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.517333 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" event={"ID":"b15a4861-38b1-4144-b0ae-7a079a389221","Type":"ContainerDied","Data":"d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d"} Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.519626 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-6lkrr" event={"ID":"5997b569-2b35-4c1c-bcdd-2d89d9beeefe","Type":"ContainerStarted","Data":"921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c"} Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.519655 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-6lkrr" event={"ID":"5997b569-2b35-4c1c-bcdd-2d89d9beeefe","Type":"ContainerStarted","Data":"552a259a08649dda22676221b7b909f65cde5da7a2895ef504d7221134500034"} Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.530575 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.543053 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.549749 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.549800 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.549811 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.549828 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.550144 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:00Z","lastTransitionTime":"2025-12-13T06:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.561226 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5
618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.571469 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.581314 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.590611 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\
\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.613207 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.652707 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.653111 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.653169 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.653181 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.653199 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.653210 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:00Z","lastTransitionTime":"2025-12-13T06:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.691818 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.737639 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1e
dc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.756132 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.756169 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:00 crc kubenswrapper[4644]: 
I1213 06:46:00.756180 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.756195 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.756205 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:00Z","lastTransitionTime":"2025-12-13T06:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.774334 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuberne
tes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.813464 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID
\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.853128 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.858867 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.858903 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.858915 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.858928 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.858938 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:00Z","lastTransitionTime":"2025-12-13T06:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.894626 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.931748 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.961416 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.961464 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.961473 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.961486 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.961495 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:00Z","lastTransitionTime":"2025-12-13T06:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:00 crc kubenswrapper[4644]: I1213 06:46:00.977781 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:00Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.013437 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.051204 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.064284 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.064316 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.064325 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.064338 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.064347 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:01Z","lastTransitionTime":"2025-12-13T06:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.094144 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.133866 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.166715 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.166752 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.166762 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.166775 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.166786 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:01Z","lastTransitionTime":"2025-12-13T06:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.172877 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.213263 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.253329 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.269223 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.269268 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.269278 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.269299 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.269309 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:01Z","lastTransitionTime":"2025-12-13T06:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.296986 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5
618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.335653 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.371274 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.371312 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.371321 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.371334 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.371344 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:01Z","lastTransitionTime":"2025-12-13T06:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.372694 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.388340 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.388428 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:01 crc kubenswrapper[4644]: E1213 06:46:01.388541 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.388563 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:01 crc kubenswrapper[4644]: E1213 06:46:01.388615 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:01 crc kubenswrapper[4644]: E1213 06:46:01.388677 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.413600 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-c
ni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.452370 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-ac
cess-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.473256 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.473285 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.473310 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.473323 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.473332 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:01Z","lastTransitionTime":"2025-12-13T06:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.526285 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerStarted","Data":"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437"} Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.530050 4644 generic.go:334] "Generic (PLEG): container finished" podID="b15a4861-38b1-4144-b0ae-7a079a389221" containerID="4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9" exitCode=0 Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.530113 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" event={"ID":"b15a4861-38b1-4144-b0ae-7a079a389221","Type":"ContainerDied","Data":"4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9"} Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.545596 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.558912 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.572437 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.576513 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.576545 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.576554 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.576569 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.576581 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:01Z","lastTransitionTime":"2025-12-13T06:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.613119 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.653392 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.678632 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.678660 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.678671 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.678685 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.678693 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:01Z","lastTransitionTime":"2025-12-13T06:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.698022 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5
618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.735188 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.773725 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 
2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.781662 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.781705 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.781714 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.781728 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.781738 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:01Z","lastTransitionTime":"2025-12-13T06:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.814593 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\
\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.855089 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.883605 4644 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.883642 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.883651 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.883664 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.883674 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:01Z","lastTransitionTime":"2025-12-13T06:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.897393 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.899841 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.902639 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.932222 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.952983 4644 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.985803 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.985842 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.985851 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.985865 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.985874 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:01Z","lastTransitionTime":"2025-12-13T06:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:01 crc kubenswrapper[4644]: I1213 06:46:01.991197 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:01Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.030921 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.073228 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.088669 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.088709 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.088719 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.088735 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.088746 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:02Z","lastTransitionTime":"2025-12-13T06:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.113200 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.151035 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.191032 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.191081 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.191111 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.191131 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.191141 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:02Z","lastTransitionTime":"2025-12-13T06:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.191383 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.236428 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124
d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.272950 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.293913 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.293982 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.293994 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.294009 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.294019 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:02Z","lastTransitionTime":"2025-12-13T06:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.313339 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.355550 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.394310 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.395212 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.395242 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.395251 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.395264 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.395275 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:02Z","lastTransitionTime":"2025-12-13T06:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.439018 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5
618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.476324 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.497586 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.497630 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.497641 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.497656 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.497676 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:02Z","lastTransitionTime":"2025-12-13T06:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.514246 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.535522 4644 generic.go:334] "Generic (PLEG): container finished" podID="b15a4861-38b1-4144-b0ae-7a079a389221" containerID="b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3" exitCode=0 Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.535558 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" event={"ID":"b15a4861-38b1-4144-b0ae-7a079a389221","Type":"ContainerDied","Data":"b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3"} Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.553750 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.594260 4644 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.599970 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.600009 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.600018 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.600037 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.600048 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:02Z","lastTransitionTime":"2025-12-13T06:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.634468 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.675968 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.702661 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.702708 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.702717 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.702734 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.702743 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:02Z","lastTransitionTime":"2025-12-13T06:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.713305 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.758893 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.794026 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.806521 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.806569 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.806593 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.806613 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.806624 4644 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:02Z","lastTransitionTime":"2025-12-13T06:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.832995 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube
rnetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.872333 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:
57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.908763 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.908815 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.908824 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.908842 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.908852 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:02Z","lastTransitionTime":"2025-12-13T06:46:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.917054 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.955054 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:02 crc kubenswrapper[4644]: I1213 06:46:02.993532 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:02Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.011333 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.011373 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.011383 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.011399 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.011410 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:03Z","lastTransitionTime":"2025-12-13T06:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.039827 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.074314 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.113575 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.113629 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.113639 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.113657 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.113668 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:03Z","lastTransitionTime":"2025-12-13T06:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.115203 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.154767 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.178620 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.178733 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:03 crc kubenswrapper[4644]: 
E1213 06:46:03.178762 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:46:11.178742416 +0000 UTC m=+33.393693249 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.178802 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:03 crc kubenswrapper[4644]: E1213 06:46:03.178866 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 06:46:03 crc kubenswrapper[4644]: E1213 06:46:03.178889 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 06:46:03 crc kubenswrapper[4644]: E1213 06:46:03.178901 4644 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:46:03 crc kubenswrapper[4644]: E1213 06:46:03.178937 4644 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 06:46:03 crc kubenswrapper[4644]: E1213 06:46:03.178950 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 06:46:11.178937074 +0000 UTC m=+33.393887907 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:46:03 crc kubenswrapper[4644]: E1213 06:46:03.178954 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 06:46:03 crc kubenswrapper[4644]: E1213 06:46:03.178966 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 06:46:11.178959828 +0000 UTC m=+33.393910661 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 06:46:03 crc kubenswrapper[4644]: E1213 06:46:03.178973 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 06:46:03 crc kubenswrapper[4644]: E1213 06:46:03.178984 4644 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:46:03 crc kubenswrapper[4644]: E1213 06:46:03.179013 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 06:46:11.179005173 +0000 UTC m=+33.393956006 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.178867 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.179067 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:03 crc kubenswrapper[4644]: E1213 06:46:03.179164 4644 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 06:46:03 crc kubenswrapper[4644]: E1213 06:46:03.179199 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 06:46:11.179192458 +0000 UTC m=+33.394143292 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.193943 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.216815 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.216854 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.216864 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.216878 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.216887 4644 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:03Z","lastTransitionTime":"2025-12-13T06:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.234843 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.319587 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.319626 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.319639 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.319656 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.319667 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:03Z","lastTransitionTime":"2025-12-13T06:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.388598 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.388594 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:03 crc kubenswrapper[4644]: E1213 06:46:03.388771 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:03 crc kubenswrapper[4644]: E1213 06:46:03.388801 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.388604 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:03 crc kubenswrapper[4644]: E1213 06:46:03.388879 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.421864 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.421897 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.421906 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.421921 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.421931 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:03Z","lastTransitionTime":"2025-12-13T06:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.523941 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.523987 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.523997 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.524013 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.524025 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:03Z","lastTransitionTime":"2025-12-13T06:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.547869 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerStarted","Data":"aaa94653d9aa50e0db74f81acb09aafadb1d6a3683cd74a7cfd9e493db04ae24"} Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.548294 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.548312 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.552513 4644 generic.go:334] "Generic (PLEG): container finished" podID="b15a4861-38b1-4144-b0ae-7a079a389221" containerID="95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2" exitCode=0 Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.552561 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" event={"ID":"b15a4861-38b1-4144-b0ae-7a079a389221","Type":"ContainerDied","Data":"95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2"} Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.561951 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.570882 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.571344 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.576006 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.587255 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.598431 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.609922 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.624256 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaa94653d9aa50e0db74f81acb09aafadb1d6a36
83cd74a7cfd9e493db04ae24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.626548 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.626590 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.626600 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.626616 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.626626 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:03Z","lastTransitionTime":"2025-12-13T06:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.635886 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c44
6z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.645084 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.654323 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.663459 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.678107 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441e
cd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.713790 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.729166 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.729201 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.729210 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.729224 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.729234 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:03Z","lastTransitionTime":"2025-12-13T06:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.753549 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.793742 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.831753 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.831808 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.831818 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.831839 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.831851 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:03Z","lastTransitionTime":"2025-12-13T06:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.835317 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.874473 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.912614 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.934319 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.934352 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.934361 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.934374 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.934383 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:03Z","lastTransitionTime":"2025-12-13T06:46:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.955852 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:03 crc kubenswrapper[4644]: I1213 06:46:03.995463 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:03Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.033393 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.036836 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.036874 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.036885 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.036900 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.036909 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:04Z","lastTransitionTime":"2025-12-13T06:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.077647 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaa94653d9aa50e0db74f81acb09aafadb1d6a3683cd74a7cfd9e493db04ae24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.118529 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.139665 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.139724 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:04 crc 
kubenswrapper[4644]: I1213 06:46:04.139735 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.139756 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.139772 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:04Z","lastTransitionTime":"2025-12-13T06:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.156008 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.198195 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.235386 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.242603 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.242682 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.242698 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.242725 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.242742 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:04Z","lastTransitionTime":"2025-12-13T06:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.278781 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd
/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\
"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.316937 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.344972 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.345010 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.345018 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.345034 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.345043 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:04Z","lastTransitionTime":"2025-12-13T06:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.353657 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.393815 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.431947 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.447551 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.447599 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.447609 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.447627 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.447641 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:04Z","lastTransitionTime":"2025-12-13T06:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.550856 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.550914 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.550925 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.550946 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.550959 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:04Z","lastTransitionTime":"2025-12-13T06:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.561761 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" event={"ID":"b15a4861-38b1-4144-b0ae-7a079a389221","Type":"ContainerStarted","Data":"1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c"} Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.561827 4644 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.577351 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.595717 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaa94653d9aa50e0db74f81acb09aafadb1d6a36
83cd74a7cfd9e493db04ae24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.610724 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.625913 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.635191 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.654148 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.654193 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.654203 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.654222 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.654233 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:04Z","lastTransitionTime":"2025-12-13T06:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.674423 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.718711 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702
f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\
\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.756104 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.757104 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.757140 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.757149 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.757162 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.757170 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:04Z","lastTransitionTime":"2025-12-13T06:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.794050 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.831907 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.859533 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.859592 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.859602 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.859618 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.859630 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:04Z","lastTransitionTime":"2025-12-13T06:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.872193 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.916096 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.954229 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.962087 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.962146 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.962156 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.962173 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.962182 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:04Z","lastTransitionTime":"2025-12-13T06:46:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:04 crc kubenswrapper[4644]: I1213 06:46:04.993986 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:04Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.033801 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:05Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.064536 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.064584 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.064593 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.064608 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.064616 4644 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:05Z","lastTransitionTime":"2025-12-13T06:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.167125 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.167232 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.167244 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.167275 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.167285 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:05Z","lastTransitionTime":"2025-12-13T06:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.269598 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.269635 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.269643 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.269665 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.269674 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:05Z","lastTransitionTime":"2025-12-13T06:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.372285 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.372321 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.372330 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.372342 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.372350 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:05Z","lastTransitionTime":"2025-12-13T06:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.388554 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.388584 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.388606 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:05 crc kubenswrapper[4644]: E1213 06:46:05.388687 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:05 crc kubenswrapper[4644]: E1213 06:46:05.388783 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:05 crc kubenswrapper[4644]: E1213 06:46:05.388847 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.475160 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.475213 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.475223 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.475239 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.475250 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:05Z","lastTransitionTime":"2025-12-13T06:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.567087 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovnkube-controller/0.log" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.570133 4644 generic.go:334] "Generic (PLEG): container finished" podID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerID="aaa94653d9aa50e0db74f81acb09aafadb1d6a3683cd74a7cfd9e493db04ae24" exitCode=1 Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.570212 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerDied","Data":"aaa94653d9aa50e0db74f81acb09aafadb1d6a3683cd74a7cfd9e493db04ae24"} Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.570837 4644 scope.go:117] "RemoveContainer" containerID="aaa94653d9aa50e0db74f81acb09aafadb1d6a3683cd74a7cfd9e493db04ae24" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.577461 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.577493 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.577501 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.577516 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.577525 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:05Z","lastTransitionTime":"2025-12-13T06:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.581798 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:05Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.592911 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:05Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.607187 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124
d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:05Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.618474 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:05Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.630836 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:05Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.643417 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:05Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.654700 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:05Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.664740 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:05Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.676820 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:05Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.679651 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.679675 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.679683 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.679697 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.679708 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:05Z","lastTransitionTime":"2025-12-13T06:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.690262 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:05Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.703589 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aaa94653d9aa50e0db74f81acb09aafadb1d6a36
83cd74a7cfd9e493db04ae24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa94653d9aa50e0db74f81acb09aafadb1d6a3683cd74a7cfd9e493db04ae24\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:05Z\\\",\\\"message\\\":\\\"nding *v1.Namespace event handler 5 for removal\\\\nI1213 06:46:05.262525 5987 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1213 06:46:05.262554 5987 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1213 06:46:05.262558 5987 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1213 06:46:05.262560 5987 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1213 06:46:05.262576 5987 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1213 06:46:05.262607 5987 factory.go:656] Stopping watch factory\\\\nI1213 06:46:05.262622 5987 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1213 06:46:05.262629 5987 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1213 06:46:05.262636 5987 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1213 06:46:05.262641 5987 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1213 06:46:05.262646 5987 handler.go:208] Removed *v1.Node event handler 2\\\\nI1213 06:46:05.262651 5987 handler.go:208] Removed *v1.Node event handler 7\\\\nI1213 06:46:05.262733 5987 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:05Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.719811 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\
\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Comple
ted\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:05Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.730018 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-12-13T06:46:05Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.740159 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\
\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:05Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.748794 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\
\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:05Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.782346 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.782382 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.782391 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.782404 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.782414 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:05Z","lastTransitionTime":"2025-12-13T06:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.884948 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.884990 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.884998 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.885013 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.885025 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:05Z","lastTransitionTime":"2025-12-13T06:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.987081 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.987118 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.987126 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.987139 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:05 crc kubenswrapper[4644]: I1213 06:46:05.987163 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:05Z","lastTransitionTime":"2025-12-13T06:46:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.089571 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.089816 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.089887 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.089959 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.090017 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:06Z","lastTransitionTime":"2025-12-13T06:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.191990 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.192269 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.192278 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.192294 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.192305 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:06Z","lastTransitionTime":"2025-12-13T06:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.294583 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.294630 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.294642 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.294657 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.294667 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:06Z","lastTransitionTime":"2025-12-13T06:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.396404 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.396465 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.396475 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.396489 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.396500 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:06Z","lastTransitionTime":"2025-12-13T06:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.498661 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.498711 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.498721 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.498737 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.498747 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:06Z","lastTransitionTime":"2025-12-13T06:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.575079 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovnkube-controller/1.log" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.575565 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovnkube-controller/0.log" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.577846 4644 generic.go:334] "Generic (PLEG): container finished" podID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerID="691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a" exitCode=1 Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.577891 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerDied","Data":"691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a"} Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.577960 4644 scope.go:117] "RemoveContainer" containerID="aaa94653d9aa50e0db74f81acb09aafadb1d6a3683cd74a7cfd9e493db04ae24" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.578474 4644 scope.go:117] "RemoveContainer" containerID="691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a" Dec 13 06:46:06 crc kubenswrapper[4644]: E1213 06:46:06.578617 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.589924 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.601182 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.601220 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.601229 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.601244 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.601257 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:06Z","lastTransitionTime":"2025-12-13T06:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.605678 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa94653d9aa50e0db74f81acb09aafadb1d6a3683cd74a7cfd9e493db04ae24\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:05Z\\\",\\\"message\\\":\\\"nding *v1.Namespace event handler 5 for removal\\\\nI1213 06:46:05.262525 5987 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1213 06:46:05.262554 5987 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1213 06:46:05.262558 5987 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1213 06:46:05.262560 5987 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1213 06:46:05.262576 5987 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1213 06:46:05.262607 5987 factory.go:656] Stopping watch factory\\\\nI1213 06:46:05.262622 5987 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1213 06:46:05.262629 5987 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1213 06:46:05.262636 5987 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1213 06:46:05.262641 5987 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1213 06:46:05.262646 5987 handler.go:208] Removed *v1.Node event handler 2\\\\nI1213 06:46:05.262651 5987 handler.go:208] Removed *v1.Node event handler 7\\\\nI1213 06:46:05.262733 5987 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"message\\\":\\\"6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183869 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1213 06:46:06.183871 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183875 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183879 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183881 6112 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-ncsgn in node crc\\\\nI1213 06:46:06.183884 6112 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1213 06:46:06.183886 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn after 0 failed attempt(s)\\\\nI1213 06:46:06.183888 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1213 06:46:06.183867 6112 obj_retry.go:386] Retry successful for *v1.Pod 
openshift-machine-config-operator/machine\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-
v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.618343 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"i
nitContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\
"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\
\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.627778 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.637491 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.645884 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.661409 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.669813 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.678203 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.685600 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.694033 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.703108 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.703176 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.703186 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.703207 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.703218 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:06Z","lastTransitionTime":"2025-12-13T06:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.704182 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.714042 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.722273 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.730375 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.805367 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.805406 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.805417 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.805434 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.805459 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:06Z","lastTransitionTime":"2025-12-13T06:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.907969 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.908007 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.908016 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.908029 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.908038 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:06Z","lastTransitionTime":"2025-12-13T06:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.929959 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.940844 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.951464 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.960416 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.974752 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441e
cd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.983508 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.992863 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:06 crc kubenswrapper[4644]: I1213 06:46:06.999750 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:06Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.006744 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.009601 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.009635 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.009644 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.009659 4644 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.009669 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:07Z","lastTransitionTime":"2025-12-13T06:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.017540 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\
",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.026972 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.035941 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.045299 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.055230 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.069512 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79
f0568ca3978593d3344fc97a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa94653d9aa50e0db74f81acb09aafadb1d6a3683cd74a7cfd9e493db04ae24\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:05Z\\\",\\\"message\\\":\\\"nding *v1.Namespace event handler 5 for removal\\\\nI1213 06:46:05.262525 5987 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1213 06:46:05.262554 5987 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1213 06:46:05.262558 5987 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1213 06:46:05.262560 5987 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1213 06:46:05.262576 5987 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1213 06:46:05.262607 5987 factory.go:656] Stopping watch factory\\\\nI1213 06:46:05.262622 5987 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1213 06:46:05.262629 5987 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1213 06:46:05.262636 5987 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1213 06:46:05.262641 5987 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1213 06:46:05.262646 5987 handler.go:208] Removed *v1.Node event handler 2\\\\nI1213 06:46:05.262651 5987 handler.go:208] Removed *v1.Node event handler 7\\\\nI1213 06:46:05.262733 5987 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"message\\\":\\\"6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183869 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1213 06:46:06.183871 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183875 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183879 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183881 6112 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-ncsgn in node crc\\\\nI1213 06:46:06.183884 6112 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1213 06:46:06.183886 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn after 0 failed attempt(s)\\\\nI1213 06:46:06.183888 6112 obj_retry.go:386] Retry successful for *v1.Pod 
openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1213 06:46:06.183867 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.085728 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.112696 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.112747 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:07 crc 
kubenswrapper[4644]: I1213 06:46:07.112758 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.112774 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.112785 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:07Z","lastTransitionTime":"2025-12-13T06:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.215518 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.215564 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.215574 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.215591 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.215601 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:07Z","lastTransitionTime":"2025-12-13T06:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.318340 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.318387 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.318397 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.318414 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.318426 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:07Z","lastTransitionTime":"2025-12-13T06:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.388747 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.388784 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.388747 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:07 crc kubenswrapper[4644]: E1213 06:46:07.388870 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:07 crc kubenswrapper[4644]: E1213 06:46:07.388920 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:07 crc kubenswrapper[4644]: E1213 06:46:07.389013 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.420034 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.420068 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.420077 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.420091 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.420100 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:07Z","lastTransitionTime":"2025-12-13T06:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.523114 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.523178 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.523188 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.523206 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.523217 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:07Z","lastTransitionTime":"2025-12-13T06:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.583529 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovnkube-controller/1.log" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.587486 4644 scope.go:117] "RemoveContainer" containerID="691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a" Dec 13 06:46:07 crc kubenswrapper[4644]: E1213 06:46:07.587678 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.604533 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79
f0568ca3978593d3344fc97a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"message\\\":\\\"6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183869 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1213 06:46:06.183871 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183875 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183879 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183881 6112 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-ncsgn in node crc\\\\nI1213 06:46:06.183884 6112 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1213 06:46:06.183886 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn after 0 failed attempt(s)\\\\nI1213 06:46:06.183888 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1213 06:46:06.183867 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.620636 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.625538 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.625606 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.625617 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.625635 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.625644 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:07Z","lastTransitionTime":"2025-12-13T06:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.633120 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.650990 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.671835 4644 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.691118 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.703714 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.716151 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.725766 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.727510 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.727552 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.727563 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.727578 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.727588 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:07Z","lastTransitionTime":"2025-12-13T06:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.737054 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.754097 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\
\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.772047 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.782573 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.793620 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e
6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.804709 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:07Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.829542 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.829597 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.829609 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.829625 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.829634 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:07Z","lastTransitionTime":"2025-12-13T06:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.932912 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.932968 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.932981 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.932999 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:07 crc kubenswrapper[4644]: I1213 06:46:07.933315 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:07Z","lastTransitionTime":"2025-12-13T06:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.035745 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.035775 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.035783 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.035795 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.035806 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:08Z","lastTransitionTime":"2025-12-13T06:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.138373 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.138405 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.138415 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.138428 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.138458 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:08Z","lastTransitionTime":"2025-12-13T06:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.239983 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.240014 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.240023 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.240038 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.240046 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:08Z","lastTransitionTime":"2025-12-13T06:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.342374 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.342411 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.342419 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.342435 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.342470 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:08Z","lastTransitionTime":"2025-12-13T06:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.400217 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.411537 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.426272 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.436969 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.445001 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.445031 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.445041 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.445056 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.445065 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:08Z","lastTransitionTime":"2025-12-13T06:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.449466 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.459782 4644 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.473797 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79
f0568ca3978593d3344fc97a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"message\\\":\\\"6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183869 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1213 06:46:06.183871 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183875 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183879 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183881 6112 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-ncsgn in node crc\\\\nI1213 06:46:06.183884 6112 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1213 06:46:06.183886 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn after 0 failed attempt(s)\\\\nI1213 06:46:06.183888 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1213 06:46:06.183867 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.483722 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.493977 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.504726 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\"
,\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.515205 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.524728 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.533113 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.546753 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.546790 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.546800 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.546817 4644 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.546828 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:08Z","lastTransitionTime":"2025-12-13T06:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.548603 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\
",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.559066 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.649013 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.649071 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.649082 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.649102 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.649119 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:08Z","lastTransitionTime":"2025-12-13T06:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.752325 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.752373 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.752387 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.752404 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.752416 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:08Z","lastTransitionTime":"2025-12-13T06:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.787764 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.787821 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.787831 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.787848 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.787878 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:08Z","lastTransitionTime":"2025-12-13T06:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:08 crc kubenswrapper[4644]: E1213 06:46:08.802062 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.806681 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.806735 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.806745 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.806758 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.806783 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:08Z","lastTransitionTime":"2025-12-13T06:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:08 crc kubenswrapper[4644]: E1213 06:46:08.823015 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.826688 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.826719 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.826729 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.826744 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.826755 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:08Z","lastTransitionTime":"2025-12-13T06:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:08 crc kubenswrapper[4644]: E1213 06:46:08.836875 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.840291 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.840323 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.840333 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.840351 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.840361 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:08Z","lastTransitionTime":"2025-12-13T06:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:08 crc kubenswrapper[4644]: E1213 06:46:08.853371 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.857337 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.857383 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.857395 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.857413 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.857424 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:08Z","lastTransitionTime":"2025-12-13T06:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:08 crc kubenswrapper[4644]: E1213 06:46:08.868373 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: E1213 06:46:08.868540 4644 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.870054 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.870093 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.870105 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.870122 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.870132 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:08Z","lastTransitionTime":"2025-12-13T06:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.952587 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb"] Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.953037 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.954548 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.955555 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.971243 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124
d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.972270 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.972302 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.972310 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.972333 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.972343 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:08Z","lastTransitionTime":"2025-12-13T06:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.981480 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:08 crc kubenswrapper[4644]: I1213 06:46:08.991644 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:08Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.001946 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.010606 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.020728 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b95dba08-4180-4513-a763-1c3ac04e7090\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9zcmb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.031337 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserve
r-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.041701 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.050363 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.060665 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.070654 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.074027 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.074063 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.074073 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.074087 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.074101 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:09Z","lastTransitionTime":"2025-12-13T06:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.084920 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"message\\\":\\\"6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183869 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1213 06:46:06.183871 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183875 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183879 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183881 6112 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-ncsgn in node crc\\\\nI1213 06:46:06.183884 6112 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1213 06:46:06.183886 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn after 0 failed attempt(s)\\\\nI1213 06:46:06.183888 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1213 06:46:06.183867 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.095894 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.105965 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.116028 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.124537 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\
":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.138416 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b95dba08-4180-4513-a763-1c3ac04e7090-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-9zcmb\" (UID: \"b95dba08-4180-4513-a763-1c3ac04e7090\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.138496 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b95dba08-4180-4513-a763-1c3ac04e7090-env-overrides\") pod \"ovnkube-control-plane-749d76644c-9zcmb\" (UID: \"b95dba08-4180-4513-a763-1c3ac04e7090\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.138554 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqx2f\" (UniqueName: \"kubernetes.io/projected/b95dba08-4180-4513-a763-1c3ac04e7090-kube-api-access-kqx2f\") pod \"ovnkube-control-plane-749d76644c-9zcmb\" (UID: \"b95dba08-4180-4513-a763-1c3ac04e7090\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.138601 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b95dba08-4180-4513-a763-1c3ac04e7090-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-9zcmb\" (UID: \"b95dba08-4180-4513-a763-1c3ac04e7090\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.177079 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.177112 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.177122 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.177141 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.177153 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:09Z","lastTransitionTime":"2025-12-13T06:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.239508 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b95dba08-4180-4513-a763-1c3ac04e7090-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-9zcmb\" (UID: \"b95dba08-4180-4513-a763-1c3ac04e7090\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.239578 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b95dba08-4180-4513-a763-1c3ac04e7090-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-9zcmb\" (UID: \"b95dba08-4180-4513-a763-1c3ac04e7090\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.239605 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b95dba08-4180-4513-a763-1c3ac04e7090-env-overrides\") pod \"ovnkube-control-plane-749d76644c-9zcmb\" (UID: \"b95dba08-4180-4513-a763-1c3ac04e7090\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.239627 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqx2f\" (UniqueName: \"kubernetes.io/projected/b95dba08-4180-4513-a763-1c3ac04e7090-kube-api-access-kqx2f\") pod \"ovnkube-control-plane-749d76644c-9zcmb\" (UID: \"b95dba08-4180-4513-a763-1c3ac04e7090\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.240672 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b95dba08-4180-4513-a763-1c3ac04e7090-env-overrides\") pod \"ovnkube-control-plane-749d76644c-9zcmb\" (UID: \"b95dba08-4180-4513-a763-1c3ac04e7090\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.240716 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b95dba08-4180-4513-a763-1c3ac04e7090-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-9zcmb\" (UID: \"b95dba08-4180-4513-a763-1c3ac04e7090\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.246145 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b95dba08-4180-4513-a763-1c3ac04e7090-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-9zcmb\" (UID: \"b95dba08-4180-4513-a763-1c3ac04e7090\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.257169 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqx2f\" (UniqueName: \"kubernetes.io/projected/b95dba08-4180-4513-a763-1c3ac04e7090-kube-api-access-kqx2f\") pod \"ovnkube-control-plane-749d76644c-9zcmb\" (UID: \"b95dba08-4180-4513-a763-1c3ac04e7090\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 
06:46:09.263128 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" Dec 13 06:46:09 crc kubenswrapper[4644]: W1213 06:46:09.278579 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb95dba08_4180_4513_a763_1c3ac04e7090.slice/crio-244516f6f112127cc1796d0c89769dada4cedb6904dfae41f624657ff926eefd WatchSource:0}: Error finding container 244516f6f112127cc1796d0c89769dada4cedb6904dfae41f624657ff926eefd: Status 404 returned error can't find the container with id 244516f6f112127cc1796d0c89769dada4cedb6904dfae41f624657ff926eefd Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.280012 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.280065 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.280080 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.280105 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.280120 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:09Z","lastTransitionTime":"2025-12-13T06:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.382482 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.382521 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.382530 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.382543 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.382554 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:09Z","lastTransitionTime":"2025-12-13T06:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.388755 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.388771 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.388810 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:09 crc kubenswrapper[4644]: E1213 06:46:09.388872 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:09 crc kubenswrapper[4644]: E1213 06:46:09.388921 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:09 crc kubenswrapper[4644]: E1213 06:46:09.388981 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.485634 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.485678 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.485686 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.485703 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.485714 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:09Z","lastTransitionTime":"2025-12-13T06:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.588703 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.588748 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.588757 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.588773 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.588782 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:09Z","lastTransitionTime":"2025-12-13T06:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.592646 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" event={"ID":"b95dba08-4180-4513-a763-1c3ac04e7090","Type":"ContainerStarted","Data":"9ed50bb2ba7b3449256f5818ce0e9e611eb5a418b21f3b7111ccc0f16eae563d"} Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.592713 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" event={"ID":"b95dba08-4180-4513-a763-1c3ac04e7090","Type":"ContainerStarted","Data":"7ba213140d27e158a4b4c01e52ec0c38eccb105e72cc056f701466bee3468e3e"} Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.592731 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" event={"ID":"b95dba08-4180-4513-a763-1c3ac04e7090","Type":"ContainerStarted","Data":"244516f6f112127cc1796d0c89769dada4cedb6904dfae41f624657ff926eefd"} Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.608065 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.620207 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.630089 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.639392 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.649461 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b95dba08-4180-4513-a763-1c3ac04e7090\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ba213140d27e158a4b4c01e52ec0c38eccb105e72cc056f701466bee3468e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed50bb2ba7b3449256f5818ce0e9e611eb5a418b21f3b7111ccc0f16eae563d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9zcmb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 
06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.664260 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.674008 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.684580 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.691765 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.691823 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.691834 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.691855 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.691869 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:09Z","lastTransitionTime":"2025-12-13T06:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.694767 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.705934 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.716407 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.729997 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.741539 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.751125 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.764913 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79
f0568ca3978593d3344fc97a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"message\\\":\\\"6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183869 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1213 06:46:06.183871 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183875 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183879 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183881 6112 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-ncsgn in node crc\\\\nI1213 06:46:06.183884 6112 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1213 06:46:06.183886 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn after 0 failed attempt(s)\\\\nI1213 06:46:06.183888 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1213 06:46:06.183867 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.776889 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:09Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.794015 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.794051 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.794062 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.794080 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.794091 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:09Z","lastTransitionTime":"2025-12-13T06:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.896131 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.896184 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.896207 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.896223 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.896232 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:09Z","lastTransitionTime":"2025-12-13T06:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.999081 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.999126 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.999136 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.999153 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:09 crc kubenswrapper[4644]: I1213 06:46:09.999164 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:09Z","lastTransitionTime":"2025-12-13T06:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.101886 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.101951 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.101962 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.101982 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.101994 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:10Z","lastTransitionTime":"2025-12-13T06:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.204925 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.204996 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.205007 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.205026 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.205038 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:10Z","lastTransitionTime":"2025-12-13T06:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.307348 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.307387 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.307395 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.307411 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.307421 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:10Z","lastTransitionTime":"2025-12-13T06:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.371067 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-c88wl"]
Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.371751 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl"
Dec 13 06:46:10 crc kubenswrapper[4644]: E1213 06:46:10.371836 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.386077 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68
b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:10Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.396844 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:10Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.405386 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:10Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.409333 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.409367 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.409376 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.409390 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.409400 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:10Z","lastTransitionTime":"2025-12-13T06:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.413137 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:10Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.420862 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:10Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.428826 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b95dba08-4180-4513-a763-1c3ac04e7090\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ba213140d27e158a4b4c01e52ec0c38eccb105e72cc056f701466bee3468e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed50bb2ba7b3449256f5818ce0e9e611eb5a418b21f3b7111ccc0f16eae563d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9zcmb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:10Z is after 2025-08-24T17:21:41Z" Dec 13 
06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.436834 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c88wl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae945e75-99b5-40b6-851d-dc9348056cdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:10Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c88wl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:10Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.446823 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:10Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.454818 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:10Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.462854 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:10Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.471626 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image
\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:10Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.485294 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:10Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.497828 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79
f0568ca3978593d3344fc97a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"message\\\":\\\"6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183869 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1213 06:46:06.183871 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183875 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183879 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183881 6112 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-ncsgn in node crc\\\\nI1213 06:46:06.183884 6112 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1213 06:46:06.183886 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn after 0 failed attempt(s)\\\\nI1213 06:46:06.183888 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1213 06:46:06.183867 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:10Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.507462 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:10Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.511755 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.511790 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.511799 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.511815 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.511824 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:10Z","lastTransitionTime":"2025-12-13T06:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.518351 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:10Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.525675 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:10Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.533407 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:10Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.552869 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs\") pod \"network-metrics-daemon-c88wl\" (UID: \"ae945e75-99b5-40b6-851d-dc9348056cdb\") " pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.552932 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t97gb\" (UniqueName: 
\"kubernetes.io/projected/ae945e75-99b5-40b6-851d-dc9348056cdb-kube-api-access-t97gb\") pod \"network-metrics-daemon-c88wl\" (UID: \"ae945e75-99b5-40b6-851d-dc9348056cdb\") " pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.614103 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.614140 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.614150 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.614170 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.614183 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:10Z","lastTransitionTime":"2025-12-13T06:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.653910 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs\") pod \"network-metrics-daemon-c88wl\" (UID: \"ae945e75-99b5-40b6-851d-dc9348056cdb\") " pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.653978 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t97gb\" (UniqueName: \"kubernetes.io/projected/ae945e75-99b5-40b6-851d-dc9348056cdb-kube-api-access-t97gb\") pod \"network-metrics-daemon-c88wl\" (UID: \"ae945e75-99b5-40b6-851d-dc9348056cdb\") " pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:10 crc kubenswrapper[4644]: E1213 06:46:10.654113 4644 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 06:46:10 crc kubenswrapper[4644]: E1213 06:46:10.654195 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs podName:ae945e75-99b5-40b6-851d-dc9348056cdb nodeName:}" failed. No retries permitted until 2025-12-13 06:46:11.154176652 +0000 UTC m=+33.369127485 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs") pod "network-metrics-daemon-c88wl" (UID: "ae945e75-99b5-40b6-851d-dc9348056cdb") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.675437 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t97gb\" (UniqueName: \"kubernetes.io/projected/ae945e75-99b5-40b6-851d-dc9348056cdb-kube-api-access-t97gb\") pod \"network-metrics-daemon-c88wl\" (UID: \"ae945e75-99b5-40b6-851d-dc9348056cdb\") " pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.717031 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.717345 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.717357 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.717376 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.717388 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:10Z","lastTransitionTime":"2025-12-13T06:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.819565 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.819601 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.819609 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.819625 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.819635 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:10Z","lastTransitionTime":"2025-12-13T06:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.921867 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.921915 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.921925 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.921942 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:10 crc kubenswrapper[4644]: I1213 06:46:10.921953 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:10Z","lastTransitionTime":"2025-12-13T06:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.023774 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.023805 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.023815 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.023831 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.023840 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:11Z","lastTransitionTime":"2025-12-13T06:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.126260 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.126322 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.126332 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.126348 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.126358 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:11Z","lastTransitionTime":"2025-12-13T06:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.158853 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs\") pod \"network-metrics-daemon-c88wl\" (UID: \"ae945e75-99b5-40b6-851d-dc9348056cdb\") " pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.159024 4644 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.159078 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs podName:ae945e75-99b5-40b6-851d-dc9348056cdb nodeName:}" failed. No retries permitted until 2025-12-13 06:46:12.159064469 +0000 UTC m=+34.374015302 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs") pod "network-metrics-daemon-c88wl" (UID: "ae945e75-99b5-40b6-851d-dc9348056cdb") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.228225 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.228277 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.228289 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.228304 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.228316 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:11Z","lastTransitionTime":"2025-12-13T06:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.259863 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.259973 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.260004 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.260019 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:46:27.26000144 +0000 UTC m=+49.474952273 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.260051 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.260078 4644 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.260125 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.260148 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.260162 
4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 06:46:27.26015407 +0000 UTC m=+49.475104903 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.260171 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.260182 4644 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.260231 4644 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.260235 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 06:46:27.260206169 +0000 UTC m=+49.475157002 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.260284 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 06:46:27.260275259 +0000 UTC m=+49.475226092 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.260236 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.260301 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.260309 4644 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.260335 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 06:46:27.260329461 +0000 UTC m=+49.475280295 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.330607 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.330646 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.330655 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.330668 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.330680 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:11Z","lastTransitionTime":"2025-12-13T06:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.388594 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.388696 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.388712 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.388766 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.389065 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:11 crc kubenswrapper[4644]: E1213 06:46:11.389130 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.433589 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.433635 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.433645 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.433661 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.433672 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:11Z","lastTransitionTime":"2025-12-13T06:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.535345 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.535389 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.535398 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.535414 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.535424 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:11Z","lastTransitionTime":"2025-12-13T06:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.638003 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.638041 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.638050 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.638066 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.638075 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:11Z","lastTransitionTime":"2025-12-13T06:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.739955 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.739991 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.740000 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.740014 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.740024 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:11Z","lastTransitionTime":"2025-12-13T06:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.842505 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.842554 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.842566 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.842583 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.842594 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:11Z","lastTransitionTime":"2025-12-13T06:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.944650 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.944693 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.944703 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.944719 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:11 crc kubenswrapper[4644]: I1213 06:46:11.944731 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:11Z","lastTransitionTime":"2025-12-13T06:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.047263 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.047322 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.047337 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.047359 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.047372 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:12Z","lastTransitionTime":"2025-12-13T06:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.149515 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.149556 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.149566 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.149579 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.149588 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:12Z","lastTransitionTime":"2025-12-13T06:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.170088 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs\") pod \"network-metrics-daemon-c88wl\" (UID: \"ae945e75-99b5-40b6-851d-dc9348056cdb\") " pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:12 crc kubenswrapper[4644]: E1213 06:46:12.170214 4644 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 06:46:12 crc kubenswrapper[4644]: E1213 06:46:12.170293 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs podName:ae945e75-99b5-40b6-851d-dc9348056cdb nodeName:}" failed. No retries permitted until 2025-12-13 06:46:14.170274969 +0000 UTC m=+36.385225802 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs") pod "network-metrics-daemon-c88wl" (UID: "ae945e75-99b5-40b6-851d-dc9348056cdb") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.251869 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.251907 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.251938 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.251954 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.251963 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:12Z","lastTransitionTime":"2025-12-13T06:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.354560 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.354613 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.354625 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.354642 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.354653 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:12Z","lastTransitionTime":"2025-12-13T06:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.389148 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:12 crc kubenswrapper[4644]: E1213 06:46:12.389312 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.457568 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.457613 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.457622 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.457639 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.457649 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:12Z","lastTransitionTime":"2025-12-13T06:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.559846 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.559885 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.559895 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.559908 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.559917 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:12Z","lastTransitionTime":"2025-12-13T06:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.661883 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.661935 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.661946 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.661965 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.661975 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:12Z","lastTransitionTime":"2025-12-13T06:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.764418 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.764480 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.764495 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.764512 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.764521 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:12Z","lastTransitionTime":"2025-12-13T06:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.866764 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.866816 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.866827 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.866846 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.866856 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:12Z","lastTransitionTime":"2025-12-13T06:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.969375 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.969419 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.969430 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.969464 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:12 crc kubenswrapper[4644]: I1213 06:46:12.969475 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:12Z","lastTransitionTime":"2025-12-13T06:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.072169 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.072202 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.072210 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.072223 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.072244 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:13Z","lastTransitionTime":"2025-12-13T06:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.174598 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.174634 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.174646 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.174661 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.174671 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:13Z","lastTransitionTime":"2025-12-13T06:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.277917 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.277967 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.277978 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.277997 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.278009 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:13Z","lastTransitionTime":"2025-12-13T06:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.379951 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.379993 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.380004 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.380017 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.380027 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:13Z","lastTransitionTime":"2025-12-13T06:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.388462 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.388482 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.388532 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:13 crc kubenswrapper[4644]: E1213 06:46:13.388650 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 13 06:46:13 crc kubenswrapper[4644]: E1213 06:46:13.388760 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 13 06:46:13 crc kubenswrapper[4644]: E1213 06:46:13.388852 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.482431 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.482494 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.482504 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.482524 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.482536 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:13Z","lastTransitionTime":"2025-12-13T06:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.584938 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.584987 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.584998 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.585017 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.585028 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:13Z","lastTransitionTime":"2025-12-13T06:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.686908 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.686947 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.686956 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.686969 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.686980 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:13Z","lastTransitionTime":"2025-12-13T06:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.789186 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.789222 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.789231 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.789255 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.789264 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:13Z","lastTransitionTime":"2025-12-13T06:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.891591 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.891649 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.891661 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.891682 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.891693 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:13Z","lastTransitionTime":"2025-12-13T06:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.994076 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.994164 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.994174 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.994217 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:13 crc kubenswrapper[4644]: I1213 06:46:13.994237 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:13Z","lastTransitionTime":"2025-12-13T06:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.096382 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.096461 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.096472 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.096489 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.096500 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:14Z","lastTransitionTime":"2025-12-13T06:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.189739 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs\") pod \"network-metrics-daemon-c88wl\" (UID: \"ae945e75-99b5-40b6-851d-dc9348056cdb\") " pod="openshift-multus/network-metrics-daemon-c88wl"
Dec 13 06:46:14 crc kubenswrapper[4644]: E1213 06:46:14.189859 4644 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 13 06:46:14 crc kubenswrapper[4644]: E1213 06:46:14.189911 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs podName:ae945e75-99b5-40b6-851d-dc9348056cdb nodeName:}" failed. No retries permitted until 2025-12-13 06:46:18.189896881 +0000 UTC m=+40.404847714 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs") pod "network-metrics-daemon-c88wl" (UID: "ae945e75-99b5-40b6-851d-dc9348056cdb") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.198236 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.198290 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.198303 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.198318 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.198327 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:14Z","lastTransitionTime":"2025-12-13T06:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.300848 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.300884 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.300904 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.300919 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.300927 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:14Z","lastTransitionTime":"2025-12-13T06:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.388578 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl"
Dec 13 06:46:14 crc kubenswrapper[4644]: E1213 06:46:14.388699 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.402684 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.402711 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.402721 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.402732 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.402741 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:14Z","lastTransitionTime":"2025-12-13T06:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.504973 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.505032 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.505041 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.505056 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.505065 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:14Z","lastTransitionTime":"2025-12-13T06:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.606437 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.606484 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.606507 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.606519 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.606527 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:14Z","lastTransitionTime":"2025-12-13T06:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.708225 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.708260 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.708269 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.708281 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.708289 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:14Z","lastTransitionTime":"2025-12-13T06:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.810133 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.810162 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.810171 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.810182 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.810190 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:14Z","lastTransitionTime":"2025-12-13T06:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.912341 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.912377 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.912385 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.912399 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:14 crc kubenswrapper[4644]: I1213 06:46:14.912408 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:14Z","lastTransitionTime":"2025-12-13T06:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.014167 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.014201 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.014210 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.014223 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.014231 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:15Z","lastTransitionTime":"2025-12-13T06:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.116699 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.116734 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.116758 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.116773 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.116783 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:15Z","lastTransitionTime":"2025-12-13T06:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.219001 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.219040 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.219050 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.219065 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.219074 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:15Z","lastTransitionTime":"2025-12-13T06:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.321924 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.321962 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.321971 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.321986 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.321994 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:15Z","lastTransitionTime":"2025-12-13T06:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.388648 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.388736 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 13 06:46:15 crc kubenswrapper[4644]: E1213 06:46:15.388784 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.388738 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 13 06:46:15 crc kubenswrapper[4644]: E1213 06:46:15.388894 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 13 06:46:15 crc kubenswrapper[4644]: E1213 06:46:15.389013 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.423549 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.423584 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.423592 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.423604 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.423614 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:15Z","lastTransitionTime":"2025-12-13T06:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.526147 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.526207 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.526216 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.526231 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.526242 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:15Z","lastTransitionTime":"2025-12-13T06:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.628231 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.628297 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.628308 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.628325 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.628337 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:15Z","lastTransitionTime":"2025-12-13T06:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.730607 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.730648 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.730656 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.730676 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.730688 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:15Z","lastTransitionTime":"2025-12-13T06:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.833615 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.833692 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.833706 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.833731 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.833746 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:15Z","lastTransitionTime":"2025-12-13T06:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.936108 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.936153 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.936162 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.936178 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:15 crc kubenswrapper[4644]: I1213 06:46:15.936188 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:15Z","lastTransitionTime":"2025-12-13T06:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.038800 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.038857 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.038894 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.038916 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.038928 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:16Z","lastTransitionTime":"2025-12-13T06:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.140903 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.140943 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.140952 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.140966 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.140976 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:16Z","lastTransitionTime":"2025-12-13T06:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.243257 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.243340 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.243349 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.243368 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.243380 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:16Z","lastTransitionTime":"2025-12-13T06:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.347063 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.347119 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.347128 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.347153 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.347169 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:16Z","lastTransitionTime":"2025-12-13T06:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.388802 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl"
Dec 13 06:46:16 crc kubenswrapper[4644]: E1213 06:46:16.388968 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.450043 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.450091 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.450100 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.450117 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.450129 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:16Z","lastTransitionTime":"2025-12-13T06:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.552294 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.552344 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.552354 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.552373 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.552384 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:16Z","lastTransitionTime":"2025-12-13T06:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.654634 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.654673 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.654687 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.654703 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.654714 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:16Z","lastTransitionTime":"2025-12-13T06:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.757076 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.757123 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.757133 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.757149 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.757159 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:16Z","lastTransitionTime":"2025-12-13T06:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.859637 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.859680 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.859690 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.859706 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.859716 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:16Z","lastTransitionTime":"2025-12-13T06:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.962240 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.962276 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.962302 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.962316 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:16 crc kubenswrapper[4644]: I1213 06:46:16.962326 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:16Z","lastTransitionTime":"2025-12-13T06:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.064648 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.064689 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.064699 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.064714 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.064725 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:17Z","lastTransitionTime":"2025-12-13T06:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.166840 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.166885 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.166895 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.166911 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.166920 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:17Z","lastTransitionTime":"2025-12-13T06:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.269587 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.269634 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.269643 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.269658 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.269667 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:17Z","lastTransitionTime":"2025-12-13T06:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.372266 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.372341 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.372354 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.372385 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.372400 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:17Z","lastTransitionTime":"2025-12-13T06:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.388561 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 13 06:46:17 crc kubenswrapper[4644]: E1213 06:46:17.388671 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.388725 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 13 06:46:17 crc kubenswrapper[4644]: E1213 06:46:17.388852 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.389048 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 13 06:46:17 crc kubenswrapper[4644]: E1213 06:46:17.389161 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.475460 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.475833 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.475950 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.476076 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.476150 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:17Z","lastTransitionTime":"2025-12-13T06:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.578840 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.578882 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.578891 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.578910 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.578924 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:17Z","lastTransitionTime":"2025-12-13T06:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.681572 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.681617 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.681628 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.681644 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.681656 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:17Z","lastTransitionTime":"2025-12-13T06:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.784011 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.784258 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.784392 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.784499 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.784561 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:17Z","lastTransitionTime":"2025-12-13T06:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.887717 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.887759 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.887768 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.887783 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.887796 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:17Z","lastTransitionTime":"2025-12-13T06:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.991035 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.991349 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.991433 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.991527 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:17 crc kubenswrapper[4644]: I1213 06:46:17.991608 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:17Z","lastTransitionTime":"2025-12-13T06:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.094594 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.094641 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.094649 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.094666 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.094677 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:18Z","lastTransitionTime":"2025-12-13T06:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.196717 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.196771 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.196781 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.196798 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.196808 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:18Z","lastTransitionTime":"2025-12-13T06:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.227541 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs\") pod \"network-metrics-daemon-c88wl\" (UID: \"ae945e75-99b5-40b6-851d-dc9348056cdb\") " pod="openshift-multus/network-metrics-daemon-c88wl"
Dec 13 06:46:18 crc kubenswrapper[4644]: E1213 06:46:18.227693 4644 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 13 06:46:18 crc kubenswrapper[4644]: E1213 06:46:18.227759 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs podName:ae945e75-99b5-40b6-851d-dc9348056cdb nodeName:}" failed. No retries permitted until 2025-12-13 06:46:26.227740155 +0000 UTC m=+48.442690988 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs") pod "network-metrics-daemon-c88wl" (UID: "ae945e75-99b5-40b6-851d-dc9348056cdb") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.298554 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.298593 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.298601 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.298617 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.298630 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:18Z","lastTransitionTime":"2025-12-13T06:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.388840 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl"
Dec 13 06:46:18 crc kubenswrapper[4644]: E1213 06:46:18.388996 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.400750 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.400786 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.400796 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.400811 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.400823 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:18Z","lastTransitionTime":"2025-12-13T06:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.405984 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.416774 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.426877 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.435793 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.446191 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.457966 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b95dba08-4180-4513-a763-1c3ac04e7090\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ba213140d27e158a4b4c01e52ec0c38eccb105e72cc056f701466bee3468e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed50bb2ba7b3449256f5818ce0e9e611eb5a418b21f3b7111ccc0f16eae563d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9zcmb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z" Dec 13 
06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.468894 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c88wl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae945e75-99b5-40b6-851d-dc9348056cdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:10Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c88wl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.493502 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.506194 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.506236 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.506245 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.506262 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.506271 4644 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:18Z","lastTransitionTime":"2025-12-13T06:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.519512 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.539685 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.551315 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.562692 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.581160 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79
f0568ca3978593d3344fc97a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"message\\\":\\\"6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183869 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1213 06:46:06.183871 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183875 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183879 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183881 6112 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-ncsgn in node crc\\\\nI1213 06:46:06.183884 6112 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1213 06:46:06.183886 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn after 0 failed attempt(s)\\\\nI1213 06:46:06.183888 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1213 06:46:06.183867 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.597220 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.609182 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.609403 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.609568 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.609600 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.609618 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.609628 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:18Z","lastTransitionTime":"2025-12-13T06:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.619729 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":
\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.629544 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.712950 4644 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.712999 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.713011 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.713030 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.713042 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:18Z","lastTransitionTime":"2025-12-13T06:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.815415 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.815477 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.815488 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.815505 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.815514 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:18Z","lastTransitionTime":"2025-12-13T06:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.918372 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.918415 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.918426 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.918464 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.918476 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:18Z","lastTransitionTime":"2025-12-13T06:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.989540 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.989578 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.989586 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.989602 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:18 crc kubenswrapper[4644]: I1213 06:46:18.989613 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:18Z","lastTransitionTime":"2025-12-13T06:46:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:19 crc kubenswrapper[4644]: E1213 06:46:19.000338 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:18Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.003504 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.003545 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.003555 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.003569 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.003579 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:19Z","lastTransitionTime":"2025-12-13T06:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:19 crc kubenswrapper[4644]: E1213 06:46:19.013040 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:19Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.017209 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.017241 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.017251 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.017264 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.017274 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:19Z","lastTransitionTime":"2025-12-13T06:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:19 crc kubenswrapper[4644]: E1213 06:46:19.027827 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:19Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.031139 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.031182 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
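Every patch attempt in this stretch fails for the single root cause recorded above: the serving certificate of the node.network-node-identity.openshift.io webhook on 127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, long before the node's clock time of 2025-12-13. A minimal Go sketch for inspecting that endpoint's leaf certificate directly; the address is taken from the log, and InsecureSkipVerify is set only so the handshake completes and NotAfter can be read even though the certificate is expired:

// certcheck.go - dial the webhook endpoint from the log and print the
// leaf certificate's validity window (assumes the listener is up).
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Address taken from the kubelet log line; adjust as needed.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // skip verification so an expired cert can still be inspected
	})
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	state := conn.ConnectionState()
	if len(state.PeerCertificates) == 0 {
		log.Fatal("no peer certificate presented")
	}
	cert := state.PeerCertificates[0]
	fmt.Printf("subject:    %s\n", cert.Subject)
	fmt.Printf("not before: %s\n", cert.NotBefore.Format(time.RFC3339))
	fmt.Printf("not after:  %s\n", cert.NotAfter.Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		fmt.Println("certificate is EXPIRED (matches the x509 error in the log)")
	}
}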
event="NodeHasNoDiskPressure" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.031193 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.031210 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.031221 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:19Z","lastTransitionTime":"2025-12-13T06:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:19 crc kubenswrapper[4644]: E1213 06:46:19.041381 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:19Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.044152 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.044177 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
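The "Error updating node status, will retry" entries repeat because the kubelet wraps the status PATCH in a small bounded retry loop; once the limit is hit it gives up with "update node status exceeds retry count", which appears just below. The control flow is roughly the following sketch; the constant mirrors upstream kubelet's nodeStatusUpdateRetry, but the code is illustrative, not the actual kubelet source:

// Illustrative sketch of the kubelet's bounded node-status retry.
package main

import (
	"errors"
	"fmt"
)

const nodeStatusUpdateRetry = 5 // mirrors the upstream constant

// patchNodeStatus is a stand-in for the real PATCH against the API server;
// here it always fails the way the log shows (webhook cert expired).
func patchNodeStatus() error {
	return errors.New(`Internal error occurred: failed calling webhook "node.network-node-identity.openshift.io"`)
}

func updateNodeStatus() error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := patchNodeStatus(); err != nil {
			fmt.Printf("Error updating node status, will retry: %v\n", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	if err := updateNodeStatus(); err != nil {
		fmt.Println("Unable to update node status:", err)
	}
}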
event="NodeHasNoDiskPressure" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.044186 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.044199 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.044209 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:19Z","lastTransitionTime":"2025-12-13T06:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:19 crc kubenswrapper[4644]: E1213 06:46:19.053431 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:19Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:19 crc kubenswrapper[4644]: E1213 06:46:19.053580 4644 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.054625 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
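Separately from the webhook failure, the Ready condition keeps resetting to False because the runtime reports NetworkReady=false: nothing is installed in /etc/kubernetes/cni/net.d/ yet. That readiness check amounts to scanning the configured directory for CNI configuration files, roughly as in this simplified sketch; real runtimes go through libcni's ConfFiles rather than this code:

// Simplified sketch of the check behind "no CNI configuration file in
// /etc/kubernetes/cni/net.d/".
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func cniConfigFiles(dir string) ([]string, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return nil, err
	}
	var files []string
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions libcni accepts
			files = append(files, filepath.Join(dir, e.Name()))
		}
	}
	return files, nil
}

func main() {
	files, err := cniConfigFiles("/etc/kubernetes/cni/net.d")
	if err != nil || len(files) == 0 {
		fmt.Println("NetworkReady=false: no CNI configuration file found")
		return
	}
	fmt.Println("CNI configs:", files)
}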
event="NodeHasSufficientMemory" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.054649 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.054657 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.054670 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.054698 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:19Z","lastTransitionTime":"2025-12-13T06:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.157226 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.157494 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.157609 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.157695 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.157757 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:19Z","lastTransitionTime":"2025-12-13T06:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.260046 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.260075 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.260082 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.260094 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.260103 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:19Z","lastTransitionTime":"2025-12-13T06:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.363566 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.363608 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.363618 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.363632 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.363645 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:19Z","lastTransitionTime":"2025-12-13T06:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.388889 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.388985 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.388992 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:19 crc kubenswrapper[4644]: E1213 06:46:19.389101 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:19 crc kubenswrapper[4644]: E1213 06:46:19.389182 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:19 crc kubenswrapper[4644]: E1213 06:46:19.389269 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.466892 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.466927 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.466936 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.466953 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.466963 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:19Z","lastTransitionTime":"2025-12-13T06:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.569297 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.569345 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.569356 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.569370 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.569380 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:19Z","lastTransitionTime":"2025-12-13T06:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.670700 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.670732 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.670740 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.670753 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.670761 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:19Z","lastTransitionTime":"2025-12-13T06:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.772081 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.772113 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.772123 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.772137 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.772147 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:19Z","lastTransitionTime":"2025-12-13T06:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.874715 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.874759 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.874768 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.874785 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.874796 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:19Z","lastTransitionTime":"2025-12-13T06:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.977748 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.977781 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.977790 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.977806 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:19 crc kubenswrapper[4644]: I1213 06:46:19.977816 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:19Z","lastTransitionTime":"2025-12-13T06:46:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.079829 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.079868 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.079877 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.079891 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.079900 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:20Z","lastTransitionTime":"2025-12-13T06:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.182430 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.182486 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.182495 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.182511 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.182521 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:20Z","lastTransitionTime":"2025-12-13T06:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.285274 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.285332 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.285341 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.285358 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.285369 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:20Z","lastTransitionTime":"2025-12-13T06:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.387946 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.388009 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.388018 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.388034 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.388044 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:20Z","lastTransitionTime":"2025-12-13T06:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.388371 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:20 crc kubenswrapper[4644]: E1213 06:46:20.388770 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.389021 4644 scope.go:117] "RemoveContainer" containerID="691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.490145 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.490374 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.490383 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.490398 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.490407 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:20Z","lastTransitionTime":"2025-12-13T06:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.593085 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.593124 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.593132 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.593145 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.593159 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:20Z","lastTransitionTime":"2025-12-13T06:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.623671 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovnkube-controller/1.log" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.626511 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerStarted","Data":"feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832"} Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.626658 4644 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.640632 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe6
72be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:20Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.659817 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:20Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.671401 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:20Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.686111 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:20Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.696065 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.696104 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.696113 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.696128 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.696140 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:20Z","lastTransitionTime":"2025-12-13T06:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.697036 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b95dba08-4180-4513-a763-1c3ac04e7090\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ba213140d27e158a4b4c01e52ec0c38eccb105e72cc056f701466bee3468e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed50bb2ba7b3449256f5818ce0e9e611eb5a418b21f3b7111ccc0f16eae563d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2a
f0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9zcmb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:20Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.706257 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c88wl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae945e75-99b5-40b6-851d-dc9348056cdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:10Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c88wl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:20Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.721384 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124
d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:20Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.731363 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:20Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.739856 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:20Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.749672 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/op
enshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:20Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.758811 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:20Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.772155 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da048630
0d3f874b72bc7b19272d0832\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"message\\\":\\\"6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183869 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1213 06:46:06.183871 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183875 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183879 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183881 6112 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-ncsgn in node crc\\\\nI1213 06:46:06.183884 6112 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1213 06:46:06.183886 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn after 0 failed attempt(s)\\\\nI1213 06:46:06.183888 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1213 06:46:06.183867 6112 obj_retry.go:386] Retry successful for *v1.Pod 
openshift-machine-config-operator/machine\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],
\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:20Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.784236 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:20Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.794216 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:20Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.798121 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.798166 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.798174 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.798188 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.798199 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:20Z","lastTransitionTime":"2025-12-13T06:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.802577 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:20Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.810484 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:20Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.818558 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:20Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.900806 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.900841 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.900851 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.900866 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:20 crc kubenswrapper[4644]: I1213 06:46:20.900880 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:20Z","lastTransitionTime":"2025-12-13T06:46:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.002814 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.002861 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.002870 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.002885 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.002896 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:21Z","lastTransitionTime":"2025-12-13T06:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.105012 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.105048 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.105057 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.105072 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.105084 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:21Z","lastTransitionTime":"2025-12-13T06:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.207305 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.207357 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.207367 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.207383 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.207393 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:21Z","lastTransitionTime":"2025-12-13T06:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.309674 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.309733 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.309745 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.309764 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.309778 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:21Z","lastTransitionTime":"2025-12-13T06:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.388839 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.388903 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:21 crc kubenswrapper[4644]: E1213 06:46:21.388969 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.389037 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:21 crc kubenswrapper[4644]: E1213 06:46:21.389167 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:21 crc kubenswrapper[4644]: E1213 06:46:21.389262 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.411977 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.412012 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.412020 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.412033 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.412044 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:21Z","lastTransitionTime":"2025-12-13T06:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.514313 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.514379 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.514388 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.514402 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.514410 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:21Z","lastTransitionTime":"2025-12-13T06:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.616433 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.616487 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.616499 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.616513 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.616522 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:21Z","lastTransitionTime":"2025-12-13T06:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.629433 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovnkube-controller/2.log" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.630291 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovnkube-controller/1.log" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.632394 4644 generic.go:334] "Generic (PLEG): container finished" podID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerID="feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832" exitCode=1 Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.632433 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerDied","Data":"feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832"} Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.632491 4644 scope.go:117] "RemoveContainer" containerID="691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.633138 4644 scope.go:117] "RemoveContainer" containerID="feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832" Dec 13 06:46:21 crc kubenswrapper[4644]: E1213 06:46:21.633297 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.645534 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.659516 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da048630
0d3f874b72bc7b19272d0832\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://691faf727b01e57cd53196e6a4e3898c68b35c79f0568ca3978593d3344fc97a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"message\\\":\\\"6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183869 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1213 06:46:06.183871 6112 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183875 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn\\\\nI1213 06:46:06.183879 6112 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1213 06:46:06.183881 6112 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-ncsgn in node crc\\\\nI1213 06:46:06.183884 6112 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1213 06:46:06.183886 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-ncsgn after 0 failed attempt(s)\\\\nI1213 06:46:06.183888 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1213 06:46:06.183867 6112 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:21Z\\\",\\\"message\\\":\\\"ew object: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1213 06:46:21.030765 6341 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1213 06:46:21.030772 6341 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1213 06:46:21.030779 6341 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nF1213 06:46:21.030782 6341 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z]\\\\nI1213 06:46:21.0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\
"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.669548 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.678010 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.688926 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.697703 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.705954 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.714301 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.718250 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.718285 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.718294 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.718309 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.718319 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:21Z","lastTransitionTime":"2025-12-13T06:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.722927 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b95dba08-4180-4513-a763-1c3ac04e7090\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ba213140d27e158a4b4c01e52ec0c38eccb105e72cc056f701466bee3468e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed50bb2ba7b3449256f5818ce0e9e611eb5a418b21f3b7111ccc0f16eae563d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:08Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9zcmb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.730387 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c88wl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae945e75-99b5-40b6-851d-dc9348056cdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:10Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c88wl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.744609 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\
\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c724
49e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.754842 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.764106 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.775073 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.784945 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.793746 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.802682 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.820409 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.820465 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.820477 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.820493 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.820501 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:21Z","lastTransitionTime":"2025-12-13T06:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.923003 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.923063 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.923074 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.923093 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:21 crc kubenswrapper[4644]: I1213 06:46:21.923116 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:21Z","lastTransitionTime":"2025-12-13T06:46:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.024992 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.025609 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.025641 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.025662 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.025674 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:22Z","lastTransitionTime":"2025-12-13T06:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.128127 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.128167 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.128175 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.128190 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.128200 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:22Z","lastTransitionTime":"2025-12-13T06:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.230403 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.230460 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.230469 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.230483 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.230492 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:22Z","lastTransitionTime":"2025-12-13T06:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.332530 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.332563 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.332572 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.332586 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.332595 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:22Z","lastTransitionTime":"2025-12-13T06:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.388603 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:22 crc kubenswrapper[4644]: E1213 06:46:22.388725 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.434606 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.434642 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.434651 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.434665 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.434676 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:22Z","lastTransitionTime":"2025-12-13T06:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.537292 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.537331 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.537339 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.537363 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.537372 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:22Z","lastTransitionTime":"2025-12-13T06:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.636748 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovnkube-controller/2.log" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.638647 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.638677 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.638685 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.638698 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.638726 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:22Z","lastTransitionTime":"2025-12-13T06:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.740756 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.740793 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.740802 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.740816 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.740827 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:22Z","lastTransitionTime":"2025-12-13T06:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.843131 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.843178 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.843187 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.843201 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.843211 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:22Z","lastTransitionTime":"2025-12-13T06:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.945865 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.945907 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.945915 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.945930 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:22 crc kubenswrapper[4644]: I1213 06:46:22.945939 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:22Z","lastTransitionTime":"2025-12-13T06:46:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.048287 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.048347 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.048370 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.048387 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.048397 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:23Z","lastTransitionTime":"2025-12-13T06:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.150438 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.150497 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.150507 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.150522 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.150531 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:23Z","lastTransitionTime":"2025-12-13T06:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.252764 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.252801 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.252810 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.252825 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.252841 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:23Z","lastTransitionTime":"2025-12-13T06:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.355348 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.355397 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.355408 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.355421 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.355430 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:23Z","lastTransitionTime":"2025-12-13T06:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.388222 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.388258 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.388284 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:23 crc kubenswrapper[4644]: E1213 06:46:23.388339 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:23 crc kubenswrapper[4644]: E1213 06:46:23.388455 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:23 crc kubenswrapper[4644]: E1213 06:46:23.388632 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.457096 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.457138 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.457151 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.457167 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.457176 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:23Z","lastTransitionTime":"2025-12-13T06:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.559256 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.559310 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.559322 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.559336 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.559344 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:23Z","lastTransitionTime":"2025-12-13T06:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.660951 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.660999 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.661009 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.661028 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.661039 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:23Z","lastTransitionTime":"2025-12-13T06:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.763196 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.763235 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.763247 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.763261 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.763273 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:23Z","lastTransitionTime":"2025-12-13T06:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.865875 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.865916 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.865924 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.865939 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.865948 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:23Z","lastTransitionTime":"2025-12-13T06:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.968200 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.968271 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.968281 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.968296 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:23 crc kubenswrapper[4644]: I1213 06:46:23.968305 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:23Z","lastTransitionTime":"2025-12-13T06:46:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.014011 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.014733 4644 scope.go:117] "RemoveContainer" containerID="feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832" Dec 13 06:46:24 crc kubenswrapper[4644]: E1213 06:46:24.014885 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.026298 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:24Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.040460 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da048630
0d3f874b72bc7b19272d0832\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:21Z\\\",\\\"message\\\":\\\"ew object: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1213 06:46:21.030765 6341 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1213 06:46:21.030772 6341 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1213 06:46:21.030779 6341 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nF1213 06:46:21.030782 6341 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z]\\\\nI1213 06:46:21.0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:24Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.051758 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:24Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.059950 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T06:46:24Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.068334 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:24Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.073347 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.073401 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.073417 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.073432 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.073465 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:24Z","lastTransitionTime":"2025-12-13T06:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.079553 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:24Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.088552 4644 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b95dba08-4180-4513-a763-1c3ac04e7090\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ba213140d27e158a4b4c01e52ec0c38eccb105e72cc056f701466bee3468e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed50bb2ba7b3449256f5818ce0e9e611eb5a418b21f3b7111ccc0f16eae563d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9zcmb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:24Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.096060 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c88wl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae945e75-99b5-40b6-851d-dc9348056cdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:10Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c88wl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:24Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.109485 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d42290
0e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f3
8bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:24Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.117626 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"
image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:24Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.126045 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:24Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.132750 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:24Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.139783 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:24Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.148942 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:24Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.157824 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:24Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.166061 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:24Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.174535 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:24Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.175838 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.175867 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.175877 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.175892 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.175903 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:24Z","lastTransitionTime":"2025-12-13T06:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.277852 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.277898 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.277910 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.277927 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.277936 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:24Z","lastTransitionTime":"2025-12-13T06:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.380058 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.380096 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.380106 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.380119 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.380131 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:24Z","lastTransitionTime":"2025-12-13T06:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.388528 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:24 crc kubenswrapper[4644]: E1213 06:46:24.388628 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.481842 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.481894 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.481905 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.481920 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.481931 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:24Z","lastTransitionTime":"2025-12-13T06:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.584502 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.584546 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.584555 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.584571 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.584581 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:24Z","lastTransitionTime":"2025-12-13T06:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.686463 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.686505 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.686517 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.686532 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.686543 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:24Z","lastTransitionTime":"2025-12-13T06:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.789277 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.789326 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.789335 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.789352 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.789362 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:24Z","lastTransitionTime":"2025-12-13T06:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.891225 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.891274 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.891283 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.891297 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.891307 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:24Z","lastTransitionTime":"2025-12-13T06:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.993316 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.993356 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.993364 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.993391 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:24 crc kubenswrapper[4644]: I1213 06:46:24.993400 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:24Z","lastTransitionTime":"2025-12-13T06:46:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.095337 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.095374 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.095402 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.095420 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.095428 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:25Z","lastTransitionTime":"2025-12-13T06:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.197678 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.197725 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.197733 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.197749 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.197764 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:25Z","lastTransitionTime":"2025-12-13T06:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.299492 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.299531 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.299539 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.299553 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.299561 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:25Z","lastTransitionTime":"2025-12-13T06:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.388690 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.388711 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.388739 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:25 crc kubenswrapper[4644]: E1213 06:46:25.388810 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:25 crc kubenswrapper[4644]: E1213 06:46:25.388865 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:25 crc kubenswrapper[4644]: E1213 06:46:25.388966 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.402206 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.402247 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.402256 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.402270 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.402282 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:25Z","lastTransitionTime":"2025-12-13T06:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.504841 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.504883 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.504892 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.504909 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.504919 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:25Z","lastTransitionTime":"2025-12-13T06:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.607113 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.607160 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.607169 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.607185 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.607202 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:25Z","lastTransitionTime":"2025-12-13T06:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.709250 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.709305 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.709316 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.709336 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.709349 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:25Z","lastTransitionTime":"2025-12-13T06:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.811704 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.811748 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.811758 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.811774 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.811785 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:25Z","lastTransitionTime":"2025-12-13T06:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.914427 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.914486 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.914496 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.914514 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:25 crc kubenswrapper[4644]: I1213 06:46:25.914525 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:25Z","lastTransitionTime":"2025-12-13T06:46:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.016465 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.016515 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.016526 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.016542 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.016554 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:26Z","lastTransitionTime":"2025-12-13T06:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.118557 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.118597 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.118606 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.118623 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.118633 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:26Z","lastTransitionTime":"2025-12-13T06:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.221230 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.221275 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.221284 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.221297 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.221307 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:26Z","lastTransitionTime":"2025-12-13T06:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.230150 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.238788 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.240728 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:26Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.256173 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da048630
0d3f874b72bc7b19272d0832\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:21Z\\\",\\\"message\\\":\\\"ew object: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1213 06:46:21.030765 6341 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1213 06:46:21.030772 6341 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1213 06:46:21.030779 6341 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nF1213 06:46:21.030782 6341 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z]\\\\nI1213 06:46:21.0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:26Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.267557 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:26Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.277479 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T06:46:26Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.287378 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:26Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.296877 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\
":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:26Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.305137 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:26Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.306523 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs\") pod \"network-metrics-daemon-c88wl\" (UID: \"ae945e75-99b5-40b6-851d-dc9348056cdb\") " pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:26 crc kubenswrapper[4644]: E1213 06:46:26.306655 4644 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 06:46:26 crc kubenswrapper[4644]: E1213 06:46:26.306696 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs podName:ae945e75-99b5-40b6-851d-dc9348056cdb nodeName:}" failed. No retries permitted until 2025-12-13 06:46:42.306684861 +0000 UTC m=+64.521635693 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs") pod "network-metrics-daemon-c88wl" (UID: "ae945e75-99b5-40b6-851d-dc9348056cdb") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.314754 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b95dba08-4180-4513-a763-1c3ac04e7090\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ba213140d27e158a4b4c01e52ec0c38eccb105e72cc056f701466bee3468e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed50bb2ba7b3449256f5818ce0e9e611eb5a418b21f3b7111ccc0f16eae563d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9zcmb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:26Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.322858 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c88wl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae945e75-99b5-40b6-851d-dc9348056cdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:10Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c88wl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:26Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.323943 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.323976 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.323984 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.323998 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.324008 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:26Z","lastTransitionTime":"2025-12-13T06:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.337069 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:26Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.347689 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:26Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.357274 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:26Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.366934 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:26Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.378095 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:26Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.388474 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:26 crc kubenswrapper[4644]: E1213 06:46:26.388626 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.389020 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:26Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.399414 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:26Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.409561 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",
\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:26Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.425920 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.425952 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.425960 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.425974 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.425984 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:26Z","lastTransitionTime":"2025-12-13T06:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.528436 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.528498 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.528509 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.528525 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.528534 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:26Z","lastTransitionTime":"2025-12-13T06:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.630706 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.630933 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.631007 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.631077 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.631149 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:26Z","lastTransitionTime":"2025-12-13T06:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.733147 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.733189 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.733200 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.733214 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.733224 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:26Z","lastTransitionTime":"2025-12-13T06:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.835316 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.835585 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.835683 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.835767 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.835822 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:26Z","lastTransitionTime":"2025-12-13T06:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.938466 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.938503 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.938512 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.938525 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:26 crc kubenswrapper[4644]: I1213 06:46:26.938534 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:26Z","lastTransitionTime":"2025-12-13T06:46:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.040985 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.041031 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.041041 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.041057 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.041067 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:27Z","lastTransitionTime":"2025-12-13T06:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.143251 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.143294 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.143304 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.143318 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.143328 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:27Z","lastTransitionTime":"2025-12-13T06:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.245560 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.245596 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.245606 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.245620 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.245630 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:27Z","lastTransitionTime":"2025-12-13T06:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.316628 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:46:27 crc kubenswrapper[4644]: E1213 06:46:27.316822 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:46:59.316791963 +0000 UTC m=+81.531742795 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.316882 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.316911 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.316937 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.316957 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:27 crc kubenswrapper[4644]: E1213 06:46:27.317061 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 06:46:27 crc kubenswrapper[4644]: E1213 06:46:27.317090 4644 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 06:46:27 crc kubenswrapper[4644]: E1213 06:46:27.317164 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 06:46:59.317147064 +0000 UTC m=+81.532097897 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 06:46:27 crc kubenswrapper[4644]: E1213 06:46:27.317094 4644 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 06:46:27 crc kubenswrapper[4644]: E1213 06:46:27.317271 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 06:46:59.317250299 +0000 UTC m=+81.532201142 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 06:46:27 crc kubenswrapper[4644]: E1213 06:46:27.317105 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 06:46:27 crc kubenswrapper[4644]: E1213 06:46:27.317308 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 06:46:27 crc kubenswrapper[4644]: E1213 06:46:27.317321 4644 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:46:27 crc kubenswrapper[4644]: E1213 06:46:27.317350 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 06:46:59.317342433 +0000 UTC m=+81.532293267 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:46:27 crc kubenswrapper[4644]: E1213 06:46:27.317107 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 06:46:27 crc kubenswrapper[4644]: E1213 06:46:27.317385 4644 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:46:27 crc kubenswrapper[4644]: E1213 06:46:27.317472 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 06:46:59.31745684 +0000 UTC m=+81.532407673 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.348144 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.348179 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.348187 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.348200 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.348209 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:27Z","lastTransitionTime":"2025-12-13T06:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.388949 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:27 crc kubenswrapper[4644]: E1213 06:46:27.389061 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.389289 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:27 crc kubenswrapper[4644]: E1213 06:46:27.389358 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.389533 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:27 crc kubenswrapper[4644]: E1213 06:46:27.389711 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.450298 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.450325 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.450333 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.450346 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.450355 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:27Z","lastTransitionTime":"2025-12-13T06:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.551927 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.551969 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.551978 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.551993 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.552003 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:27Z","lastTransitionTime":"2025-12-13T06:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.653917 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.654125 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.654184 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.654242 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.654321 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:27Z","lastTransitionTime":"2025-12-13T06:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.757070 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.757111 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.757120 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.757134 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.757144 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:27Z","lastTransitionTime":"2025-12-13T06:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.859867 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.859904 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.859915 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.859928 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.859937 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:27Z","lastTransitionTime":"2025-12-13T06:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.962493 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.962542 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.962550 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.962564 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:27 crc kubenswrapper[4644]: I1213 06:46:27.962577 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:27Z","lastTransitionTime":"2025-12-13T06:46:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.064653 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.064696 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.064705 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.064724 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.064734 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:28Z","lastTransitionTime":"2025-12-13T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.166879 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.166924 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.166934 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.166952 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.166965 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:28Z","lastTransitionTime":"2025-12-13T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.269148 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.269186 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.269196 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.269213 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.269222 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:28Z","lastTransitionTime":"2025-12-13T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.371497 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.371537 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.371546 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.371566 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.371576 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:28Z","lastTransitionTime":"2025-12-13T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.388967 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:28 crc kubenswrapper[4644]: E1213 06:46:28.389078 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.400934 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":
\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.410082 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.416956 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.424668 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.433020 4644 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b95dba08-4180-4513-a763-1c3ac04e7090\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ba213140d27e158a4b4c01e52ec0c38eccb105e72cc056f701466bee3468e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed50bb2ba7b3449256f5818ce0e9e611eb5a418b21f3b7111ccc0f16eae563d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9zcmb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.440462 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c88wl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae945e75-99b5-40b6-851d-dc9348056cdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:10Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c88wl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.455898 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124
d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.466378 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.473592 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.473619 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.473627 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.473640 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.473649 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:28Z","lastTransitionTime":"2025-12-13T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.477559 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.487932 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.498921 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.512346 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da048630
0d3f874b72bc7b19272d0832\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:21Z\\\",\\\"message\\\":\\\"ew object: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1213 06:46:21.030765 6341 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1213 06:46:21.030772 6341 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1213 06:46:21.030779 6341 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nF1213 06:46:21.030782 6341 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z]\\\\nI1213 06:46:21.0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.527692 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.537359 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c95f284b-0acb-4663-966d-e8784aba4593\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7cf602b6144f7e94214304ab850074ba871148dc0165f21cb9a8cfba8d06c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc0d51af45093a810195e31aa1e43043ebc2c967fb2f2cc68683a6863d72e889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a93800
66b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c421321fe13141866ef6b2747c48845a897a7ea4238c16e92cbcd33fa2f787c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.547283 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.555591 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.565630 4644 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.574858 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:28Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.575333 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.575384 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.575394 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.575415 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.575436 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:28Z","lastTransitionTime":"2025-12-13T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.677836 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.677881 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.677907 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.677923 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.677934 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:28Z","lastTransitionTime":"2025-12-13T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.780488 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.780541 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.780552 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.780574 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.780585 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:28Z","lastTransitionTime":"2025-12-13T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.882964 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.883269 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.883334 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.883403 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.883511 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:28Z","lastTransitionTime":"2025-12-13T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.986183 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.986225 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.986234 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.986249 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:28 crc kubenswrapper[4644]: I1213 06:46:28.986257 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:28Z","lastTransitionTime":"2025-12-13T06:46:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.088913 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.088972 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.088982 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.088996 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.089007 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:29Z","lastTransitionTime":"2025-12-13T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.159128 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.159165 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.159173 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.159185 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.159194 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:29Z","lastTransitionTime":"2025-12-13T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:29 crc kubenswrapper[4644]: E1213 06:46:29.169756 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:29Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.173064 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.173108 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.173118 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.173134 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.173145 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:29Z","lastTransitionTime":"2025-12-13T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:29 crc kubenswrapper[4644]: E1213 06:46:29.183024 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:29Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.185907 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.185957 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.185969 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.185990 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.186002 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:29Z","lastTransitionTime":"2025-12-13T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:29 crc kubenswrapper[4644]: E1213 06:46:29.195489 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:29Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.198284 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.198315 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
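The status-patch failures above share one root cause: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 is serving a certificate that expired on 2025-08-24T17:21:41Z, months before the current time the kubelet reports (2025-12-13T06:46:29Z). A small standalone Go probe along the following lines (a hypothetical diagnostic sketch, not part of the kubelet or of this log's tooling) can confirm the validity window of whatever certificate the webhook presents:

// cert_probe.go — minimal sketch: dial the webhook endpoint named in the
// failing Post above and print the serving certificate's validity window.
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Endpoint taken from the failing Post in the log; adjust as needed.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		// Skip verification on purpose: we want to inspect the expired
		// certificate rather than fail the handshake as the kubelet does.
		InsecureSkipVerify: true,
	})
	if err != nil {
		log.Fatalf("dial failed: %v", err)
	}
	defer conn.Close()

	now := time.Now()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%s notBefore=%s notAfter=%s expired=%v\n",
			cert.Subject,
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339),
			now.After(cert.NotAfter))
	}
}

Until that certificate is rotated, every node-status patch fails the same way, which is why the kubelet keeps retrying the identical payload in this log.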
event="NodeHasNoDiskPressure" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.198336 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.198349 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.198360 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:29Z","lastTransitionTime":"2025-12-13T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:29 crc kubenswrapper[4644]: E1213 06:46:29.208335 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:29Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.211575 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.211610 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.211620 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.211635 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.211647 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:29Z","lastTransitionTime":"2025-12-13T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:29 crc kubenswrapper[4644]: E1213 06:46:29.221899 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:29Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:29 crc kubenswrapper[4644]: E1213 06:46:29.222029 4644 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.223507 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.223540 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.223549 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.223565 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.223578 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:29Z","lastTransitionTime":"2025-12-13T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.325698 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.325758 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.325768 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.325786 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.325797 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:29Z","lastTransitionTime":"2025-12-13T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.388557 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:29 crc kubenswrapper[4644]: E1213 06:46:29.388679 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.388741 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:29 crc kubenswrapper[4644]: E1213 06:46:29.388870 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.388899 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:29 crc kubenswrapper[4644]: E1213 06:46:29.389042 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.428848 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.428888 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.428897 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.428915 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.428926 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:29Z","lastTransitionTime":"2025-12-13T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.531413 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.531480 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.531490 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.531506 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.531514 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:29Z","lastTransitionTime":"2025-12-13T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.633575 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.633620 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.633629 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.633646 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.633659 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:29Z","lastTransitionTime":"2025-12-13T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.736378 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.736419 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.736428 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.736468 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.736478 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:29Z","lastTransitionTime":"2025-12-13T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.838684 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.838726 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.838735 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.838753 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.838765 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:29Z","lastTransitionTime":"2025-12-13T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.940723 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.940766 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.940774 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.940789 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:29 crc kubenswrapper[4644]: I1213 06:46:29.940798 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:29Z","lastTransitionTime":"2025-12-13T06:46:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.043115 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.043159 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.043168 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.043185 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.043195 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:30Z","lastTransitionTime":"2025-12-13T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.145842 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.145891 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.145901 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.145916 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.145925 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:30Z","lastTransitionTime":"2025-12-13T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.247719 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.247751 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.247759 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.247772 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.247781 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:30Z","lastTransitionTime":"2025-12-13T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.350224 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.350274 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.350282 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.350299 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.350307 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:30Z","lastTransitionTime":"2025-12-13T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.388467 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:30 crc kubenswrapper[4644]: E1213 06:46:30.388621 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.452628 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.452667 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.452677 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.452692 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.452704 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:30Z","lastTransitionTime":"2025-12-13T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.555230 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.555286 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.555295 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.555310 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.555320 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:30Z","lastTransitionTime":"2025-12-13T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.657795 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.657851 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.657862 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.657876 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.657886 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:30Z","lastTransitionTime":"2025-12-13T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.760352 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.760389 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.760398 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.760411 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.760423 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:30Z","lastTransitionTime":"2025-12-13T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.862024 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.862065 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.862074 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.862091 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.862100 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:30Z","lastTransitionTime":"2025-12-13T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.963984 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.964025 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.964034 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.964048 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:30 crc kubenswrapper[4644]: I1213 06:46:30.964058 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:30Z","lastTransitionTime":"2025-12-13T06:46:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.065909 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.066173 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.066284 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.066388 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.066505 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:31Z","lastTransitionTime":"2025-12-13T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.168189 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.168434 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.168700 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.168888 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.169057 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:31Z","lastTransitionTime":"2025-12-13T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.271363 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.271403 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.271413 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.271427 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.271437 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:31Z","lastTransitionTime":"2025-12-13T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.373614 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.373648 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.373656 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.373668 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.373677 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:31Z","lastTransitionTime":"2025-12-13T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.388428 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:31 crc kubenswrapper[4644]: E1213 06:46:31.388561 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.388584 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.388650 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:31 crc kubenswrapper[4644]: E1213 06:46:31.388772 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:31 crc kubenswrapper[4644]: E1213 06:46:31.388877 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.475924 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.475971 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.475980 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.475996 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.476007 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:31Z","lastTransitionTime":"2025-12-13T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.577726 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.577760 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.577770 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.577784 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.577792 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:31Z","lastTransitionTime":"2025-12-13T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.679523 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.679572 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.679580 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.679593 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.679601 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:31Z","lastTransitionTime":"2025-12-13T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.781791 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.781838 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.781920 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.781940 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.781950 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:31Z","lastTransitionTime":"2025-12-13T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.883984 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.884023 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.884034 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.884049 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.884057 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:31Z","lastTransitionTime":"2025-12-13T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.986566 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.986611 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.986622 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.986637 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:31 crc kubenswrapper[4644]: I1213 06:46:31.986646 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:31Z","lastTransitionTime":"2025-12-13T06:46:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.088747 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.088786 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.088796 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.088810 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.088819 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:32Z","lastTransitionTime":"2025-12-13T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.191208 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.191243 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.191253 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.191267 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.191276 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:32Z","lastTransitionTime":"2025-12-13T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.293748 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.293792 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.293803 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.293818 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.293828 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:32Z","lastTransitionTime":"2025-12-13T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.388413 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:32 crc kubenswrapper[4644]: E1213 06:46:32.388582 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.395841 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.395886 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.395896 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.395910 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.395919 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:32Z","lastTransitionTime":"2025-12-13T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.498677 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.498724 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.498735 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.498752 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.498764 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:32Z","lastTransitionTime":"2025-12-13T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.600860 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.600917 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.600930 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.600946 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.600956 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:32Z","lastTransitionTime":"2025-12-13T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.702773 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.702808 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.702818 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.702831 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.702841 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:32Z","lastTransitionTime":"2025-12-13T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.805556 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.805603 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.805614 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.805633 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.805644 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:32Z","lastTransitionTime":"2025-12-13T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.908039 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.908085 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.908102 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.908119 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:32 crc kubenswrapper[4644]: I1213 06:46:32.908131 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:32Z","lastTransitionTime":"2025-12-13T06:46:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.010046 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.010094 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.010104 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.010118 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.010128 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:33Z","lastTransitionTime":"2025-12-13T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.112294 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.112330 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.112339 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.112353 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.112362 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:33Z","lastTransitionTime":"2025-12-13T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.214752 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.214788 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.214797 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.214809 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.214818 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:33Z","lastTransitionTime":"2025-12-13T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.317364 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.317397 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.317406 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.317419 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.317432 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:33Z","lastTransitionTime":"2025-12-13T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.388246 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.388309 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.388358 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:33 crc kubenswrapper[4644]: E1213 06:46:33.388393 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:33 crc kubenswrapper[4644]: E1213 06:46:33.388557 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:33 crc kubenswrapper[4644]: E1213 06:46:33.388712 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.420200 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.420237 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.420248 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.420264 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.420277 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:33Z","lastTransitionTime":"2025-12-13T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.522140 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.522181 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.522189 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.522204 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.522213 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:33Z","lastTransitionTime":"2025-12-13T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.624585 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.624629 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.624638 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.624654 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.624713 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:33Z","lastTransitionTime":"2025-12-13T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.726432 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.726500 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.726511 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.726524 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.726537 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:33Z","lastTransitionTime":"2025-12-13T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.829192 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.829227 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.829236 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.829251 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.829260 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:33Z","lastTransitionTime":"2025-12-13T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.932220 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.932277 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.932288 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.932316 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:33 crc kubenswrapper[4644]: I1213 06:46:33.932330 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:33Z","lastTransitionTime":"2025-12-13T06:46:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.034554 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.034597 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.034605 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.034621 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.034631 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:34Z","lastTransitionTime":"2025-12-13T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.137091 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.137134 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.137144 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.137158 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.137169 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:34Z","lastTransitionTime":"2025-12-13T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.239841 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.239880 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.239888 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.239902 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.239912 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:34Z","lastTransitionTime":"2025-12-13T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.342859 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.342896 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.342905 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.342923 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.342933 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:34Z","lastTransitionTime":"2025-12-13T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.388598 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:34 crc kubenswrapper[4644]: E1213 06:46:34.388736 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.444599 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.444631 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.444640 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.444653 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.444661 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:34Z","lastTransitionTime":"2025-12-13T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.546333 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.546362 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.546370 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.546395 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.546405 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:34Z","lastTransitionTime":"2025-12-13T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.648329 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.648360 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.648368 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.648380 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.648391 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:34Z","lastTransitionTime":"2025-12-13T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.750479 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.750519 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.750528 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.750538 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.750547 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:34Z","lastTransitionTime":"2025-12-13T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.852821 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.852858 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.852866 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.852880 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.852889 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:34Z","lastTransitionTime":"2025-12-13T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.954924 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.955223 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.955292 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.955371 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:34 crc kubenswrapper[4644]: I1213 06:46:34.955460 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:34Z","lastTransitionTime":"2025-12-13T06:46:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.057352 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.057397 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.057407 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.057423 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.057434 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:35Z","lastTransitionTime":"2025-12-13T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.159720 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.159773 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.159783 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.159801 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.159814 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:35Z","lastTransitionTime":"2025-12-13T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.262281 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.262323 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.262332 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.262350 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.262358 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:35Z","lastTransitionTime":"2025-12-13T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.364719 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.364756 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.364764 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.364779 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.364788 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:35Z","lastTransitionTime":"2025-12-13T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.388954 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:35 crc kubenswrapper[4644]: E1213 06:46:35.389082 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.389203 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:35 crc kubenswrapper[4644]: E1213 06:46:35.389258 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.389538 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:35 crc kubenswrapper[4644]: E1213 06:46:35.389765 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.466521 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.467051 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.467255 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.467369 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.467467 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:35Z","lastTransitionTime":"2025-12-13T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.569698 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.569753 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.569765 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.569779 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.569789 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:35Z","lastTransitionTime":"2025-12-13T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.672224 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.672303 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.672316 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.672340 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.672353 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:35Z","lastTransitionTime":"2025-12-13T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.775431 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.775501 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.775525 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.775545 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.775556 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:35Z","lastTransitionTime":"2025-12-13T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.878039 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.878091 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.878101 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.878120 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.878132 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:35Z","lastTransitionTime":"2025-12-13T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.980433 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.980493 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.980503 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.980531 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:35 crc kubenswrapper[4644]: I1213 06:46:35.980540 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:35Z","lastTransitionTime":"2025-12-13T06:46:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.083344 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.083384 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.083393 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.083406 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.083416 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:36Z","lastTransitionTime":"2025-12-13T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.186118 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.186151 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.186159 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.186175 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.186184 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:36Z","lastTransitionTime":"2025-12-13T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.288113 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.288150 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.288158 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.288171 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.288179 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:36Z","lastTransitionTime":"2025-12-13T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.389216 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:36 crc kubenswrapper[4644]: E1213 06:46:36.389345 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
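[Every sync failure above traces to one condition: the container runtime found no CNI configuration under /etc/kubernetes/cni/net.d/, so it reports NetworkPluginNotReady until the network provider (here, OVN-Kubernetes) writes one. A minimal sketch of that readiness check in Go; the directory path comes from the log, while the standalone program and the accepted extensions (.conf, .conflist, .json, in the style of libcni loaders) are assumptions for illustration, not the kubelet's actual code.]

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfDir is the directory the kubelet log complains about.
const cniConfDir = "/etc/kubernetes/cni/net.d"

func main() {
	entries, err := os.ReadDir(cniConfDir)
	if err != nil {
		fmt.Printf("NetworkReady=false: cannot read %s: %v\n", cniConfDir, err)
		os.Exit(1)
	}
	var confs []string
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions libcni-style loaders accept (assumed)
			confs = append(confs, e.Name())
		}
	}
	if len(confs) == 0 {
		// Mirrors the condition behind "no CNI configuration file ... Has your network provider started?"
		fmt.Println("NetworkReady=false: no CNI configuration file found; has your network provider started?")
		os.Exit(1)
	}
	fmt.Printf("NetworkReady=true: found %v\n", confs)
}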
pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.390502 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.390566 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.390576 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.390589 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.390597 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:36Z","lastTransitionTime":"2025-12-13T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.492377 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.492413 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.492421 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.492434 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.492463 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:36Z","lastTransitionTime":"2025-12-13T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.594887 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.594924 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.594933 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.594947 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.594957 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:36Z","lastTransitionTime":"2025-12-13T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.697588 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.697629 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.697637 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.697653 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.697662 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:36Z","lastTransitionTime":"2025-12-13T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.799978 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.800020 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.800033 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.800050 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.800060 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:36Z","lastTransitionTime":"2025-12-13T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.902349 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.902657 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.902727 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.902805 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:36 crc kubenswrapper[4644]: I1213 06:46:36.902860 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:36Z","lastTransitionTime":"2025-12-13T06:46:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.005411 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.005462 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.005471 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.005488 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.005498 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:37Z","lastTransitionTime":"2025-12-13T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.108098 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.108366 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.108433 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.108537 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.108613 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:37Z","lastTransitionTime":"2025-12-13T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.211355 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.211650 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.211715 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.211780 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.211848 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:37Z","lastTransitionTime":"2025-12-13T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.314040 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.314077 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.314085 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.314098 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.314109 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:37Z","lastTransitionTime":"2025-12-13T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.388875 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:37 crc kubenswrapper[4644]: E1213 06:46:37.388997 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.388896 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.388882 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:37 crc kubenswrapper[4644]: E1213 06:46:37.389358 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:37 crc kubenswrapper[4644]: E1213 06:46:37.389468 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.389597 4644 scope.go:117] "RemoveContainer" containerID="feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832" Dec 13 06:46:37 crc kubenswrapper[4644]: E1213 06:46:37.389836 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.415763 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.415802 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.415811 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.415826 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.415839 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:37Z","lastTransitionTime":"2025-12-13T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.518056 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.518090 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.518100 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.518115 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.518123 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:37Z","lastTransitionTime":"2025-12-13T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.620069 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.620110 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.620122 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.620139 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.620149 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:37Z","lastTransitionTime":"2025-12-13T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.722652 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.722693 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.722703 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.722716 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.722725 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:37Z","lastTransitionTime":"2025-12-13T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.825005 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.825048 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.825056 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.825075 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.825086 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:37Z","lastTransitionTime":"2025-12-13T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.927210 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.927251 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.927259 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.927275 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:37 crc kubenswrapper[4644]: I1213 06:46:37.927284 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:37Z","lastTransitionTime":"2025-12-13T06:46:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.029346 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.029393 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.029402 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.029417 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.029427 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:38Z","lastTransitionTime":"2025-12-13T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.131997 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.132038 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.132046 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.132061 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.132070 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:38Z","lastTransitionTime":"2025-12-13T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.234015 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.234061 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.234070 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.234084 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.234095 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:38Z","lastTransitionTime":"2025-12-13T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.336165 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.336214 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.336225 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.336240 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.336251 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:38Z","lastTransitionTime":"2025-12-13T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
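[The ovnkube-controller restart above is in CrashLoopBackOff: the kubelet delays each restart of a crashing container, doubling the delay from a small base up to a cap, which is why this attempt reports "back-off 20s". A sketch of that schedule, assuming the commonly cited kubelet defaults of a 10 s base and 5 m cap; the function is hypothetical, not kubelet code.]

package main

import (
	"fmt"
	"time"
)

// crashLoopDelays lists restart delays doubling from base until they
// would exceed maxDelay, then clamps to maxDelay (assumed defaults:
// 10s base, 5m cap).
func crashLoopDelays(base, maxDelay time.Duration) []time.Duration {
	var out []time.Duration
	for d := base; ; d *= 2 {
		if d > maxDelay {
			out = append(out, maxDelay)
			return out
		}
		out = append(out, d)
	}
}

func main() {
	// Prints: [10s 20s 40s 1m20s 2m40s 5m0s] — the second attempt is
	// the "back-off 20s" seen in the log.
	fmt.Println(crashLoopDelays(10*time.Second, 5*time.Minute))
}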
Has your network provider started?"}
Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.389645 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl"
Dec 13 06:46:38 crc kubenswrapper[4644]: E1213 06:46:38.389836 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb"
Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.400327 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" err="failed to patch status [escaped status-patch JSON elided: Ready/ContainersReady conditions and running containerStatuses for kube-rbac-proxy and ovnkube-cluster-manager] for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9zcmb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.408811 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c88wl" err="failed to patch status [escaped status-patch JSON elided: ContainersNotReady conditions; kube-rbac-proxy and network-metrics-daemon still ContainerCreating] for pod \"openshift-multus\"/\"network-metrics-daemon-c88wl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.423029 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status [escaped status-patch JSON elided: Ready conditions, running containerStatuses for etcd, etcd-metrics, etcd-readyz, etcd-rev and etcdctl, and completed init containers setup, etcd-ensure-env-vars and etcd-resources-copy] for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.433177 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status [escaped status-patch JSON elided: Ready conditions and running containerStatuses for cluster-policy-controller, kube-controller-manager, kube-controller-manager-cert-syncer and kube-controller-manager-recovery-controller] for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z"
message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.439185 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.439251 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:38Z","lastTransitionTime":"2025-12-13T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.443911 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.454175 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.466056 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.479144 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.491211 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.502499 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.514018 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.524010 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c95f284b-0acb-4663-966d-e8784aba4593\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7cf602b6144f7e94214304ab850074ba871148dc0165f21cb9a8cfba8d06c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc0d51af45093a810195e31aa1e43043ebc2c967fb2f2cc68683a6863d72e889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c421321fe13141866ef6b2747c48845a897a7ea4238c16e92cbcd33fa2f787c0\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.534988 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The 
container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.542097 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.542142 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.542152 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.542169 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.542179 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:38Z","lastTransitionTime":"2025-12-13T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.549719 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:21Z\\\",\\\"message\\\":\\\"ew object: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1213 06:46:21.030765 6341 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1213 06:46:21.030772 6341 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1213 06:46:21.030779 6341 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nF1213 06:46:21.030782 6341 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z]\\\\nI1213 06:46:21.0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.561184 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.571980 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.582160 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.591402 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\
":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:38Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.644051 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.644092 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.644101 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.644130 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.644140 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:38Z","lastTransitionTime":"2025-12-13T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.746267 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.746314 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.746322 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.746337 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.746347 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:38Z","lastTransitionTime":"2025-12-13T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.849291 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.849335 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.849345 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.849361 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.849372 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:38Z","lastTransitionTime":"2025-12-13T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.951766 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.951802 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.951812 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.951825 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:38 crc kubenswrapper[4644]: I1213 06:46:38.951835 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:38Z","lastTransitionTime":"2025-12-13T06:46:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.055025 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.055081 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.055092 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.055111 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.055123 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:39Z","lastTransitionTime":"2025-12-13T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.157746 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.157793 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.157802 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.157817 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.157826 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:39Z","lastTransitionTime":"2025-12-13T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.260883 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.260936 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.260947 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.260963 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.260974 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:39Z","lastTransitionTime":"2025-12-13T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.363170 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.363217 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.363228 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.363246 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.363257 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:39Z","lastTransitionTime":"2025-12-13T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.388906 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.388964 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.388997 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:39 crc kubenswrapper[4644]: E1213 06:46:39.389072 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:39 crc kubenswrapper[4644]: E1213 06:46:39.389189 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:39 crc kubenswrapper[4644]: E1213 06:46:39.389360 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.465456 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.465492 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.465501 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.465516 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.465524 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:39Z","lastTransitionTime":"2025-12-13T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.567845 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.567901 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.567913 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.567927 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.567935 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:39Z","lastTransitionTime":"2025-12-13T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.594306 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.594373 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.594384 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.594407 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.594418 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:39Z","lastTransitionTime":"2025-12-13T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:39 crc kubenswrapper[4644]: E1213 06:46:39.609070 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.612827 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.612858 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.612867 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.612879 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.612887 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:39Z","lastTransitionTime":"2025-12-13T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:39 crc kubenswrapper[4644]: E1213 06:46:39.622880 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.625984 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.626022 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.626032 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.626047 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.626056 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:39Z","lastTransitionTime":"2025-12-13T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:39 crc kubenswrapper[4644]: E1213 06:46:39.635620 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.639435 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.639482 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.639492 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.639506 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.639515 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:39Z","lastTransitionTime":"2025-12-13T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:39 crc kubenswrapper[4644]: E1213 06:46:39.648993 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.652311 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.652349 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.652362 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.652377 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.652389 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:39Z","lastTransitionTime":"2025-12-13T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:39 crc kubenswrapper[4644]: E1213 06:46:39.662030 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:39Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:39 crc kubenswrapper[4644]: E1213 06:46:39.662167 4644 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.669562 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.669589 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.669598 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.669613 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.669622 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:39Z","lastTransitionTime":"2025-12-13T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.772672 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.772717 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.772729 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.772745 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.772754 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:39Z","lastTransitionTime":"2025-12-13T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.874638 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.874687 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.874696 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.874713 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.874723 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:39Z","lastTransitionTime":"2025-12-13T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.977233 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.977284 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.977295 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.977309 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:39 crc kubenswrapper[4644]: I1213 06:46:39.977319 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:39Z","lastTransitionTime":"2025-12-13T06:46:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.079980 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.080023 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.080033 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.080047 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.080061 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:40Z","lastTransitionTime":"2025-12-13T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.182469 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.182520 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.182533 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.182550 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.182575 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:40Z","lastTransitionTime":"2025-12-13T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.284831 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.284889 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.284904 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.284922 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.284932 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:40Z","lastTransitionTime":"2025-12-13T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.387137 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.387178 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.387188 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.387203 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.387212 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:40Z","lastTransitionTime":"2025-12-13T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.388652 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:40 crc kubenswrapper[4644]: E1213 06:46:40.388787 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.489727 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.489769 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.489781 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.489798 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.489807 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:40Z","lastTransitionTime":"2025-12-13T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.592242 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.592282 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.592290 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.592304 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.592313 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:40Z","lastTransitionTime":"2025-12-13T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.694600 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.694633 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.694641 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.694657 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.694667 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:40Z","lastTransitionTime":"2025-12-13T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.796686 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.796726 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.796735 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.796749 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.796757 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:40Z","lastTransitionTime":"2025-12-13T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.899002 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.899040 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.899049 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.899063 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:40 crc kubenswrapper[4644]: I1213 06:46:40.899072 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:40Z","lastTransitionTime":"2025-12-13T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.003159 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.003208 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.003217 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.003234 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.003245 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:41Z","lastTransitionTime":"2025-12-13T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.106018 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.106065 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.106074 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.106091 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.106101 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:41Z","lastTransitionTime":"2025-12-13T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.208319 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.208359 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.208369 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.208383 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.208393 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:41Z","lastTransitionTime":"2025-12-13T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.312171 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.312206 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.312216 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.312229 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.312237 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:41Z","lastTransitionTime":"2025-12-13T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.389150 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.389154 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:41 crc kubenswrapper[4644]: E1213 06:46:41.389591 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:41 crc kubenswrapper[4644]: E1213 06:46:41.389662 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.389169 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:41 crc kubenswrapper[4644]: E1213 06:46:41.389825 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.415401 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.415600 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.415689 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.415775 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.415836 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:41Z","lastTransitionTime":"2025-12-13T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.518755 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.518796 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.518805 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.518819 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.518830 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:41Z","lastTransitionTime":"2025-12-13T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.622153 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.622315 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.622376 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.622453 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.622531 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:41Z","lastTransitionTime":"2025-12-13T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.930108 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.930152 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.930164 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.930185 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:41 crc kubenswrapper[4644]: I1213 06:46:41.930196 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:41Z","lastTransitionTime":"2025-12-13T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.238026 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.238380 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.238464 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.238533 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.238607 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:42Z","lastTransitionTime":"2025-12-13T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.372536 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs\") pod \"network-metrics-daemon-c88wl\" (UID: \"ae945e75-99b5-40b6-851d-dc9348056cdb\") " pod="openshift-multus/network-metrics-daemon-c88wl"
Dec 13 06:46:42 crc kubenswrapper[4644]: E1213 06:46:42.372708 4644 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 13 06:46:42 crc kubenswrapper[4644]: E1213 06:46:42.372766 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs podName:ae945e75-99b5-40b6-851d-dc9348056cdb nodeName:}" failed. No retries permitted until 2025-12-13 06:47:14.372751672 +0000 UTC m=+96.587702506 (durationBeforeRetry 32s).
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs") pod "network-metrics-daemon-c88wl" (UID: "ae945e75-99b5-40b6-851d-dc9348056cdb") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.388638 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl"
Dec 13 06:46:42 crc kubenswrapper[4644]: E1213 06:46:42.388882 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb"
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.444358 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.444710 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.444818 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.444901 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.444963 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:42Z","lastTransitionTime":"2025-12-13T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.650114 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.650148 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.650157 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.650171 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.650181 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:42Z","lastTransitionTime":"2025-12-13T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.957760 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.957795 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.957803 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.957817 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:42 crc kubenswrapper[4644]: I1213 06:46:42.957826 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:42Z","lastTransitionTime":"2025-12-13T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.265137 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.265172 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.265180 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.265193 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.265203 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:43Z","lastTransitionTime":"2025-12-13T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.389226 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.389251 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 13 06:46:43 crc kubenswrapper[4644]: E1213 06:46:43.389359 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.389388 4644 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 13 06:46:43 crc kubenswrapper[4644]: E1213 06:46:43.389499 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 13 06:46:43 crc kubenswrapper[4644]: E1213 06:46:43.389568 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.469509 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.469809 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.469897 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.469960 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.470024 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:43Z","lastTransitionTime":"2025-12-13T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.676676 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.676726 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.676737 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.676755 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.676765 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:43Z","lastTransitionTime":"2025-12-13T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.696657 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lbk25_9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd/kube-multus/0.log"
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.696724 4644 generic.go:334] "Generic (PLEG): container finished" podID="9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd" containerID="f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43" exitCode=1
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.696754 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lbk25" event={"ID":"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd","Type":"ContainerDied","Data":"f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43"}
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.697154 4644 scope.go:117] "RemoveContainer" containerID="f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43"
Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.709368 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.720162 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.729624 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.743795 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image
\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.759750 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.776507 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da048630
0d3f874b72bc7b19272d0832\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:21Z\\\",\\\"message\\\":\\\"ew object: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1213 06:46:21.030765 6341 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1213 06:46:21.030772 6341 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1213 06:46:21.030779 6341 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nF1213 06:46:21.030782 6341 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z]\\\\nI1213 06:46:21.0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.779065 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.779094 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.779103 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.779117 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.779127 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:43Z","lastTransitionTime":"2025-12-13T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.789790 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.801457 4644 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c95f284b-0acb-4663-966d-e8784aba4593\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7cf602b6144f7e94214304ab850074ba871148dc0165f21cb9a8cfba8d06c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc0d51af45093a810195e31aa1e43043ebc2c967fb2f2cc68683a6863d72e889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c421321fe13141866ef6b2747c48845a897a7ea4238c16e92cbcd33fa2f787c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\
\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.813553 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:42Z\\\",\\\"message\\\":\\\"2025-12-13T06:45:57+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to 
/host/opt/cni/bin/upgrade_2dd0d337-ca42-448f-a3d3-e9fec193beb2\\\\n2025-12-13T06:45:57+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2dd0d337-ca42-448f-a3d3-e9fec193beb2 to /host/opt/cni/bin/\\\\n2025-12-13T06:45:57Z [verbose] multus-daemon started\\\\n2025-12-13T06:45:57Z [verbose] Readiness Indicator file check\\\\n2025-12-13T06:46:42Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.824809 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.835094 4644 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.852290 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124
d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.865875 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.877654 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.881986 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.882040 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.882051 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.882067 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.882078 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:43Z","lastTransitionTime":"2025-12-13T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.892399 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.901104 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.912132 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b95dba08-4180-4513-a763-1c3ac04e7090\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ba213140d27e158a4b4c01e52ec0c38eccb105e72cc056f701466bee3468e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed50bb2ba7b3449256f5818ce0e9e611eb5a418b21f3b7111ccc0f16eae563d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9zcmb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 
06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.927379 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c88wl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae945e75-99b5-40b6-851d-dc9348056cdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:10Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c88wl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:43Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.984436 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.984500 4644 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.984508 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.984526 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:43 crc kubenswrapper[4644]: I1213 06:46:43.984535 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:43Z","lastTransitionTime":"2025-12-13T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.086750 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.086792 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.086802 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.086819 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.086827 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:44Z","lastTransitionTime":"2025-12-13T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.189202 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.189564 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.189666 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.189742 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.189803 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:44Z","lastTransitionTime":"2025-12-13T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.291679 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.291968 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.292048 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.292126 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.292190 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:44Z","lastTransitionTime":"2025-12-13T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.388865 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:44 crc kubenswrapper[4644]: E1213 06:46:44.389038 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.393955 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.393995 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.394005 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.394022 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.394032 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:44Z","lastTransitionTime":"2025-12-13T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.496707 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.496748 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.496759 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.496776 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.496786 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:44Z","lastTransitionTime":"2025-12-13T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.599061 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.599093 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.599102 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.599115 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.599125 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:44Z","lastTransitionTime":"2025-12-13T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.701055 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.701099 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.701110 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.701127 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.701137 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:44Z","lastTransitionTime":"2025-12-13T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.702025 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lbk25_9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd/kube-multus/0.log" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.702081 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lbk25" event={"ID":"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd","Type":"ContainerStarted","Data":"68f72e73eb6fea5f57ed95c448c349e23ad92277af62f67c45217ef0c03e07e1"} Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.717959 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.730633 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68f72e73eb6fea5f57ed95c448c349e23ad92277af62f67c45217ef0c03e07e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:42Z\\\",\\\"message\\\":\\\"2025-12-13T06:45:57+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2dd0d337-ca42-448f-a3d3-e9fec193beb2\\\\n2025-12-13T06:45:57+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2dd0d337-ca42-448f-a3d3-e9fec193beb2 to /host/opt/cni/bin/\\\\n2025-12-13T06:45:57Z [verbose] multus-daemon started\\\\n2025-12-13T06:45:57Z [verbose] Readiness Indicator file check\\\\n2025-12-13T06:46:42Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.741406 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.751332 4644 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.762759 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b95dba08-4180-4513-a763-1c3ac04e7090\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ba213140d27e158a4b4c01e52ec0c38eccb105e72cc056f701466bee3468e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed50bb2ba7b3449256f5818ce0e9e611eb5a418b21f3b7111ccc0f16eae563d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9zcmb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 
06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.771544 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c88wl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae945e75-99b5-40b6-851d-dc9348056cdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:10Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c88wl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.790817 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124
d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.801964 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.803156 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.803188 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.803196 4644 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.803210 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.803219 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:44Z","lastTransitionTime":"2025-12-13T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.813678 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.822462 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.833932 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.845401 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.855265 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.878381 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.904065 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c95f284b-0acb-4663-966d-e8784aba4593\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7cf602b6144f7e94214304ab850074ba871148dc0165f21cb9a8cfba8d06c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc0d51af45093a810195e31aa1e43043ebc2c967fb2f2cc68683a6863d72e889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c421321fe13141866ef6b2747c48845a897a7ea4238c16e92cbcd33fa2f787c0\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.905398 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.905463 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.905476 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.905498 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.905510 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:44Z","lastTransitionTime":"2025-12-13T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.922500 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.939575 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:21Z\\\",\\\"message\\\":\\\"ew object: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1213 06:46:21.030765 6341 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1213 06:46:21.030772 6341 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1213 06:46:21.030779 6341 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nF1213 06:46:21.030782 6341 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z]\\\\nI1213 06:46:21.0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:44 crc kubenswrapper[4644]: I1213 06:46:44.952530 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:44Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.008179 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.008224 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.008234 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.008249 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.008261 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:45Z","lastTransitionTime":"2025-12-13T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.110731 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.110769 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.110779 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.110793 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.110802 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:45Z","lastTransitionTime":"2025-12-13T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.213521 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.213561 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.213571 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.213587 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.213597 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:45Z","lastTransitionTime":"2025-12-13T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.316021 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.316056 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.316064 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.316080 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.316090 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:45Z","lastTransitionTime":"2025-12-13T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.388432 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.388513 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.388551 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:45 crc kubenswrapper[4644]: E1213 06:46:45.388601 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:45 crc kubenswrapper[4644]: E1213 06:46:45.388726 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:45 crc kubenswrapper[4644]: E1213 06:46:45.388823 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.418010 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.418058 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.418068 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.418086 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.418099 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:45Z","lastTransitionTime":"2025-12-13T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.520062 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.520107 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.520116 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.520132 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.520141 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:45Z","lastTransitionTime":"2025-12-13T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.622817 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.622866 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.622876 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.622893 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.622904 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:45Z","lastTransitionTime":"2025-12-13T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.725282 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.725319 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.725330 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.725346 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.725356 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:45Z","lastTransitionTime":"2025-12-13T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.827480 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.827523 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.827531 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.827546 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.827556 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:45Z","lastTransitionTime":"2025-12-13T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.929489 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.929527 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.929537 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.929555 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:45 crc kubenswrapper[4644]: I1213 06:46:45.929566 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:45Z","lastTransitionTime":"2025-12-13T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.032290 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.032335 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.032345 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.032363 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.032372 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:46Z","lastTransitionTime":"2025-12-13T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.136115 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.136160 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.136168 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.136183 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.136195 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:46Z","lastTransitionTime":"2025-12-13T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.238668 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.238718 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.238727 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.238744 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.238757 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:46Z","lastTransitionTime":"2025-12-13T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.340758 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.340798 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.340808 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.340823 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.340833 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:46Z","lastTransitionTime":"2025-12-13T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.388575 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:46 crc kubenswrapper[4644]: E1213 06:46:46.388718 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.443249 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.443286 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.443297 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.443316 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.443326 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:46Z","lastTransitionTime":"2025-12-13T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.545661 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.545713 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.545723 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.545741 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.545751 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:46Z","lastTransitionTime":"2025-12-13T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.647882 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.647926 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.647935 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.647948 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.647957 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:46Z","lastTransitionTime":"2025-12-13T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.749946 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.749982 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.749990 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.750005 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.750014 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:46Z","lastTransitionTime":"2025-12-13T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.852726 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.852765 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.852774 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.852788 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.852797 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:46Z","lastTransitionTime":"2025-12-13T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.955131 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.955162 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.955173 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.955185 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:46 crc kubenswrapper[4644]: I1213 06:46:46.955193 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:46Z","lastTransitionTime":"2025-12-13T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.057073 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.057121 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.057130 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.057147 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.057158 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:47Z","lastTransitionTime":"2025-12-13T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.159793 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.159828 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.159836 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.159849 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.159857 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:47Z","lastTransitionTime":"2025-12-13T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.261577 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.261610 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.261618 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.261631 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.261655 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:47Z","lastTransitionTime":"2025-12-13T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.364048 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.364087 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.364095 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.364111 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.364119 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:47Z","lastTransitionTime":"2025-12-13T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.388896 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.388974 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:47 crc kubenswrapper[4644]: E1213 06:46:47.388994 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.388896 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:47 crc kubenswrapper[4644]: E1213 06:46:47.389100 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:47 crc kubenswrapper[4644]: E1213 06:46:47.389136 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.466149 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.466185 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.466193 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.466228 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.466239 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:47Z","lastTransitionTime":"2025-12-13T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.569104 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.569148 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.569158 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.569174 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.569184 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:47Z","lastTransitionTime":"2025-12-13T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.671648 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.671696 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.671705 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.671719 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.671730 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:47Z","lastTransitionTime":"2025-12-13T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.774016 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.774048 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.774057 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.774072 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.774081 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:47Z","lastTransitionTime":"2025-12-13T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.876812 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.876852 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.876861 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.876876 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.876885 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:47Z","lastTransitionTime":"2025-12-13T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.979496 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.979567 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.979577 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.979593 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:47 crc kubenswrapper[4644]: I1213 06:46:47.979602 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:47Z","lastTransitionTime":"2025-12-13T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.084657 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.084720 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.084733 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.084751 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.084767 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:48Z","lastTransitionTime":"2025-12-13T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.187878 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.187922 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.187931 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.187945 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.187955 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:48Z","lastTransitionTime":"2025-12-13T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.290586 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.290653 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.290663 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.290679 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.290688 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:48Z","lastTransitionTime":"2025-12-13T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.388529 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:48 crc kubenswrapper[4644]: E1213 06:46:48.388713 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.392807 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.392862 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.392871 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.392884 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.392893 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:48Z","lastTransitionTime":"2025-12-13T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.398842 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.408733 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b95dba08-4180-4513-a763-1c3ac04e7090\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ba213140d27e158a4b4c01e52ec0c38eccb105e72cc056f701466bee3468e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed50bb2ba7b3449256f5818ce0e9e611eb5a418b21f3b7111ccc0f16eae563d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9zcmb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 
06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.417338 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c88wl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae945e75-99b5-40b6-851d-dc9348056cdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:10Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c88wl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.432272 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124
d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.442647 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.451282 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.460979 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.471138 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.480427 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.489948 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.495377 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.495415 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.495424 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.495452 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.495460 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:48Z","lastTransitionTime":"2025-12-13T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.501295 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.509770 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c95f284b-0acb-4663-966d-e8784aba4593\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7cf602b6144f7e94214304ab850074ba871148dc0165f21cb9a8cfba8d06c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc0d51af45093a810195e31aa1e43043ebc2c967fb2f2cc68683a6863d72e889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c421321fe13141866ef6b2747c48845a897a7ea4238c16e92cbcd33fa2f787c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.520025 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.532952 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da048630
0d3f874b72bc7b19272d0832\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:21Z\\\",\\\"message\\\":\\\"ew object: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1213 06:46:21.030765 6341 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1213 06:46:21.030772 6341 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1213 06:46:21.030779 6341 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nF1213 06:46:21.030782 6341 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z]\\\\nI1213 06:46:21.0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.544837 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.554948 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.564501 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68f72e73eb6fea5f57ed95c448c349e23ad92277af62f67c45217ef0c03e07e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:42Z\\\",\\\"message\\\":\\\"2025-12-13T06:45:57+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2dd0d337-ca42-448f-a3d3-e9fec193beb2\\\\n2025-12-13T06:45:57+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2dd0d337-ca42-448f-a3d3-e9fec193beb2 to /host/opt/cni/bin/\\\\n2025-12-13T06:45:57Z [verbose] multus-daemon started\\\\n2025-12-13T06:45:57Z [verbose] Readiness Indicator file check\\\\n2025-12-13T06:46:42Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.572170 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:48Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.597807 4644 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.597841 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.597853 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.597868 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.597878 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:48Z","lastTransitionTime":"2025-12-13T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.700299 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.700335 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.700343 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.700357 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.700369 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:48Z","lastTransitionTime":"2025-12-13T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.803034 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.803070 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.803079 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.803092 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.803101 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:48Z","lastTransitionTime":"2025-12-13T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.905372 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.905411 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.905420 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.905434 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:48 crc kubenswrapper[4644]: I1213 06:46:48.905458 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:48Z","lastTransitionTime":"2025-12-13T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.007519 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.007563 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.007572 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.007586 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.007596 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:49Z","lastTransitionTime":"2025-12-13T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.112914 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.112984 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.112999 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.113031 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.113064 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:49Z","lastTransitionTime":"2025-12-13T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.215633 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.215671 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.215681 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.215695 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.215705 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:49Z","lastTransitionTime":"2025-12-13T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.317880 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.317914 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.317922 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.317937 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.317946 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:49Z","lastTransitionTime":"2025-12-13T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.388806 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.388840 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.388893 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:49 crc kubenswrapper[4644]: E1213 06:46:49.388976 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:49 crc kubenswrapper[4644]: E1213 06:46:49.389091 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:49 crc kubenswrapper[4644]: E1213 06:46:49.389119 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.420249 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.420292 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.420301 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.420312 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.420531 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:49Z","lastTransitionTime":"2025-12-13T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.522704 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.522740 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.522750 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.522763 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.522772 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:49Z","lastTransitionTime":"2025-12-13T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.624745 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.624796 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.624805 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.624822 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.624835 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:49Z","lastTransitionTime":"2025-12-13T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.680541 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.680578 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.680589 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.680605 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.680614 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:49Z","lastTransitionTime":"2025-12-13T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:49 crc kubenswrapper[4644]: E1213 06:46:49.691700 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.694833 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.694862 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.694871 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.694885 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.694894 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:49Z","lastTransitionTime":"2025-12-13T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:49 crc kubenswrapper[4644]: E1213 06:46:49.704393 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.707366 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.707396 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.707405 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.707417 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.707425 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:49Z","lastTransitionTime":"2025-12-13T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:49 crc kubenswrapper[4644]: E1213 06:46:49.718062 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.721110 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.721146 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.721156 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.721169 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.721178 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:49Z","lastTransitionTime":"2025-12-13T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:49 crc kubenswrapper[4644]: E1213 06:46:49.732192 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.735163 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.735195 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.735204 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.735216 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.735225 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:49Z","lastTransitionTime":"2025-12-13T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:49 crc kubenswrapper[4644]: E1213 06:46:49.744045 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d67b613b-3746-4fbb-91d0-cb6f6c249fb5\\\",\\\"systemUUID\\\":\\\"b4ea0d06-22aa-4091-83fc-aeb05ba823a8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:49Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:49 crc kubenswrapper[4644]: E1213 06:46:49.744147 4644 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.745387 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.745417 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.745425 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.745453 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.745463 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:49Z","lastTransitionTime":"2025-12-13T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.848177 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.848217 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.848226 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.848243 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.848252 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:49Z","lastTransitionTime":"2025-12-13T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.950781 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.950823 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.950832 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.950849 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:49 crc kubenswrapper[4644]: I1213 06:46:49.950861 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:49Z","lastTransitionTime":"2025-12-13T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.053208 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.053250 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.053261 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.053277 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.053288 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:50Z","lastTransitionTime":"2025-12-13T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.155464 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.155516 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.155524 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.155539 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.155549 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:50Z","lastTransitionTime":"2025-12-13T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.257566 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.257630 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.257639 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.257657 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.257665 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:50Z","lastTransitionTime":"2025-12-13T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.360071 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.360101 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.360109 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.360122 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.360147 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:50Z","lastTransitionTime":"2025-12-13T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.388714 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:50 crc kubenswrapper[4644]: E1213 06:46:50.388826 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.462396 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.462458 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.462468 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.462486 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.462498 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:50Z","lastTransitionTime":"2025-12-13T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.564732 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.564780 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.564791 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.564808 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.564821 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:50Z","lastTransitionTime":"2025-12-13T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.667013 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.667056 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.667065 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.667079 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.667090 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:50Z","lastTransitionTime":"2025-12-13T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.769175 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.769216 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.769225 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.769243 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.769253 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:50Z","lastTransitionTime":"2025-12-13T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.875678 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.875718 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.875727 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.875743 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.875752 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:50Z","lastTransitionTime":"2025-12-13T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.978365 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.978403 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.978412 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.978431 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:50 crc kubenswrapper[4644]: I1213 06:46:50.978462 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:50Z","lastTransitionTime":"2025-12-13T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.080982 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.081023 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.081031 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.081048 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.081057 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:51Z","lastTransitionTime":"2025-12-13T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.183220 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.183254 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.183275 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.183289 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.183297 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:51Z","lastTransitionTime":"2025-12-13T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.286333 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.286368 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.286376 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.286392 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.286405 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:51Z","lastTransitionTime":"2025-12-13T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.388178 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:51 crc kubenswrapper[4644]: E1213 06:46:51.388286 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.388465 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:51 crc kubenswrapper[4644]: E1213 06:46:51.388514 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.388548 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:51 crc kubenswrapper[4644]: E1213 06:46:51.388582 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.388984 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.389009 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.389017 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.389028 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.389037 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:51Z","lastTransitionTime":"2025-12-13T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.491762 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.491813 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.491824 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.491843 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.491856 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:51Z","lastTransitionTime":"2025-12-13T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.593821 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.593868 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.593879 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.593895 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.593907 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:51Z","lastTransitionTime":"2025-12-13T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.695938 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.696213 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.696282 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.696355 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.696432 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:51Z","lastTransitionTime":"2025-12-13T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.797921 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.798412 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.798508 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.798583 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.798662 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:51Z","lastTransitionTime":"2025-12-13T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.900942 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.900968 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.900976 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.900990 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:51 crc kubenswrapper[4644]: I1213 06:46:51.900998 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:51Z","lastTransitionTime":"2025-12-13T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.003999 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.004040 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.004049 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.004065 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.004074 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:52Z","lastTransitionTime":"2025-12-13T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.106570 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.106838 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.106918 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.107015 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.107114 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:52Z","lastTransitionTime":"2025-12-13T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.209939 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.210196 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.210265 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.210340 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.210397 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:52Z","lastTransitionTime":"2025-12-13T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.313357 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.313395 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.313405 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.313420 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.313431 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:52Z","lastTransitionTime":"2025-12-13T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.388624 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:52 crc kubenswrapper[4644]: E1213 06:46:52.388968 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.389215 4644 scope.go:117] "RemoveContainer" containerID="feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.415554 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.415592 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.415621 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.415636 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.415646 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:52Z","lastTransitionTime":"2025-12-13T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.518769 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.519088 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.519103 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.519121 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.519131 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:52Z","lastTransitionTime":"2025-12-13T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.624764 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.624804 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.624813 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.624828 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.624837 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:52Z","lastTransitionTime":"2025-12-13T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.722736 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovnkube-controller/2.log" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.725104 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerStarted","Data":"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012"} Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.725506 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.726054 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.726089 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.726098 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.726114 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.726123 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:52Z","lastTransitionTime":"2025-12-13T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.735420 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.745010 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.754346 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.763620 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.778698 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff7d476b701faea9924351c3590853cdf0ac5104
547824a9479fc90b735ec012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:21Z\\\",\\\"message\\\":\\\"ew object: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1213 06:46:21.030765 6341 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1213 06:46:21.030772 6341 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1213 06:46:21.030779 6341 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nF1213 06:46:21.030782 6341 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z]\\\\nI1213 
06:46:21.0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[
{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.790951 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.801160 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c95f284b-0acb-4663-966d-e8784aba4593\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7cf602b6144f7e94214304ab850074ba871148dc0165f21cb9a8cfba8d06c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc0d51af45093a810195e31aa1e43043ebc2c967fb2f2cc68683a6863d72e889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c421321fe13141866ef6b2747c48845a897a7ea4238c16e92cbcd33fa2f787c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.815492 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.825480 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.827967 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.828007 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.828017 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.828032 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.828041 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:52Z","lastTransitionTime":"2025-12-13T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.837713 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.848027 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68f72e73eb6fea5f57ed95c448c349e23ad92277af62f67c45217ef0c03e07e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:42Z\\\",\\\"message\\\":\\\"2025-12-13T06:45:57+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2dd0d337-ca42-448f-a3d3-e9fec193beb2\\\\n2025-12-13T06:45:57+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2dd0d337-ca42-448f-a3d3-e9fec193beb2 to /host/opt/cni/bin/\\\\n2025-12-13T06:45:57Z [verbose] multus-daemon started\\\\n2025-12-13T06:45:57Z [verbose] Readiness Indicator file check\\\\n2025-12-13T06:46:42Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.858069 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.867817 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.875196 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.882894 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.893097 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b95dba08-4180-4513-a763-1c3ac04e7090\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ba213140d27e158a4b4c01e52ec0c38eccb105e72cc056f701466bee3468e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed50bb2ba7b3449256f5818ce0e9e611eb5a418b21f3b7111ccc0f16eae563d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9zcmb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 
06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.905514 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c88wl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae945e75-99b5-40b6-851d-dc9348056cdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:10Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c88wl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.922864 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124
d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:52Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.930633 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.930676 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.930687 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.930702 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:52 crc kubenswrapper[4644]: I1213 06:46:52.930713 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:52Z","lastTransitionTime":"2025-12-13T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.033283 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.033332 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.033343 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.033364 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.033375 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:53Z","lastTransitionTime":"2025-12-13T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.136182 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.136232 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.136241 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.136257 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.136267 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:53Z","lastTransitionTime":"2025-12-13T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.238918 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.238965 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.238977 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.238994 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.239004 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:53Z","lastTransitionTime":"2025-12-13T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.341861 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.341908 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.341918 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.341938 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.341950 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:53Z","lastTransitionTime":"2025-12-13T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.388638 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.388645 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:53 crc kubenswrapper[4644]: E1213 06:46:53.388803 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:53 crc kubenswrapper[4644]: E1213 06:46:53.388858 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.388640 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:53 crc kubenswrapper[4644]: E1213 06:46:53.388943 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.445002 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.445051 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.445061 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.445077 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.445088 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:53Z","lastTransitionTime":"2025-12-13T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.547552 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.547609 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.547633 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.547650 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.547664 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:53Z","lastTransitionTime":"2025-12-13T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.650288 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.650340 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.650352 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.650368 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.650377 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:53Z","lastTransitionTime":"2025-12-13T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.730351 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovnkube-controller/3.log" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.731084 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovnkube-controller/2.log" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.734424 4644 generic.go:334] "Generic (PLEG): container finished" podID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerID="ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012" exitCode=1 Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.734514 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerDied","Data":"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012"} Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.734565 4644 scope.go:117] "RemoveContainer" containerID="feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.735220 4644 scope.go:117] "RemoveContainer" containerID="ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012" Dec 13 06:46:53 crc kubenswrapper[4644]: E1213 06:46:53.735388 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.747280 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.753158 4644 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.753228 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.753238 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.753255 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.753282 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:53Z","lastTransitionTime":"2025-12-13T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.757230 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.768857 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68f72e73eb6fea5f57ed95c448c349e23ad92277af62f67c45217ef0c03e07e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:42Z\\\",\\\"message\\\":\\\"2025-12-13T06:45:57+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2dd0d337-ca42-448f-a3d3-e9fec193beb2\\\\n2025-12-13T06:45:57+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2dd0d337-ca42-448f-a3d3-e9fec193beb2 to /host/opt/cni/bin/\\\\n2025-12-13T06:45:57Z [verbose] multus-daemon started\\\\n2025-12-13T06:45:57Z [verbose] Readiness Indicator file check\\\\n2025-12-13T06:46:42Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.780042 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.789910 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.799920 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.809766 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.820171 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b95dba08-4180-4513-a763-1c3ac04e7090\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ba213140d27e158a4b4c01e52ec0c38eccb105e72cc056f701466bee3468e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed50bb2ba7b3449256f5818ce0e9e611eb5a418b21f3b7111ccc0f16eae563d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9zcmb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 
06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.829757 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c88wl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae945e75-99b5-40b6-851d-dc9348056cdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:10Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c88wl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.847053 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124
d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.855947 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.856016 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.856027 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.856045 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.856075 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:53Z","lastTransitionTime":"2025-12-13T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.858517 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.869046 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.880004 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.892745 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.907890 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff7d476b701faea9924351c3590853cdf0ac5104
547824a9479fc90b735ec012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feacb71d1684aa42d2f0991c89f74092da0486300d3f874b72bc7b19272d0832\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:21Z\\\",\\\"message\\\":\\\"ew object: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1213 06:46:21.030765 6341 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1213 06:46:21.030772 6341 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1213 06:46:21.030779 6341 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nF1213 06:46:21.030782 6341 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:21Z is after 2025-08-24T17:21:41Z]\\\\nI1213 06:46:21.0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:20Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:53Z\\\",\\\"message\\\":\\\" [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]} options:{GoMap:map[iface-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e 
Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Lo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\
",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.920524 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.930328 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c95f284b-0acb-4663-966d-e8784aba4593\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7cf602b6144f7e94214304ab850074ba871148dc0165f21cb9a8cfba8d06c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc0d51af45093a810195e31aa1e43043ebc2c967fb2f2cc68683a6863d72e889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c421321fe13141866ef6b2747c48845a897a7ea4238c16e92cbcd33fa2f787c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.940138 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:53Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.958902 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.958964 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.958975 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.958992 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:53 crc kubenswrapper[4644]: I1213 06:46:53.959004 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:53Z","lastTransitionTime":"2025-12-13T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.060770 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.060821 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.060831 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.060849 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.060859 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:54Z","lastTransitionTime":"2025-12-13T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.162711 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.162747 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.162755 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.162771 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.162783 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:54Z","lastTransitionTime":"2025-12-13T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.264764 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.264801 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.264810 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.264825 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.264857 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:54Z","lastTransitionTime":"2025-12-13T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.367092 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.367132 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.367141 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.367156 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.367166 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:54Z","lastTransitionTime":"2025-12-13T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.388645 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:54 crc kubenswrapper[4644]: E1213 06:46:54.388784 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.469548 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.469611 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.469622 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.469639 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.469651 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:54Z","lastTransitionTime":"2025-12-13T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.572718 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.572768 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.572777 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.572795 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.572805 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:54Z","lastTransitionTime":"2025-12-13T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.674948 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.674992 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.675002 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.675016 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.675025 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:54Z","lastTransitionTime":"2025-12-13T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.739520 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovnkube-controller/3.log" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.742852 4644 scope.go:117] "RemoveContainer" containerID="ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012" Dec 13 06:46:54 crc kubenswrapper[4644]: E1213 06:46:54.743010 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.753407 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c95f284b-0acb-4663-966d-e8784aba4593\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7cf602b6144f7e94214304ab850074ba871148dc0165f21cb9a8cfba8d06c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc0d51af45093a810195e31aa1e43043ebc2c967fb2f2cc68683a6863d72e889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c421321fe13141866ef6b2747c48845a897a7ea4238c16e92cbcd33fa2f787c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.763671 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.776895 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.776951 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.776960 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.776974 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.776984 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:54Z","lastTransitionTime":"2025-12-13T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.778864 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:53Z\\\",\\\"message\\\":\\\" [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]} options:{GoMap:map[iface-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate 
Table:Lo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.789853 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b15a4861-38b1-4144-b0ae-7a079a389221\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ebafa430aea4a3a66bc71ce27f483a2837d24f8ec09015947033c3f026e2c3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08533b1d8a9031533b13f79ae124207214ad01ceab30c981d1610a33dcbc8a23\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71174a04f1e6260045d327811802aa73537dc76a5950cf7cbf324ae5abf8a69c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d70fc4bd12adb166e29eb20cdb3fae3233187c2b0a2e1c94ae167e422f13df4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d6cac43df7048a50bcdfdf424e5a95766811217dd8a9492e0f96bdd47bc3dc9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3753e103f2e17eb5ced1ca487ea84f2627ad7373a9eda4c21e579a2e8dc11c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://95ee0f7da1be4e28dcafd3cfe069da858bd670276fed4178a9a98363fe602ce2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:46:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c446z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-ncsgn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.798638 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce66f48a6686b910d38446a3269dea07876bd05fdbe203e1f085960d5f8bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.810187 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lbk25" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68f72e73eb6fea5f57ed95c448c349e23ad92277af62f67c45217ef0c03e07e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:42Z\\\",\\\"message\\\":\\\"2025-12-13T06:45:57+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2dd0d337-ca42-448f-a3d3-e9fec193beb2\\\\n2025-12-13T06:45:57+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2dd0d337-ca42-448f-a3d3-e9fec193beb2 to /host/opt/cni/bin/\\\\n2025-12-13T06:45:57Z [verbose] multus-daemon started\\\\n2025-12-13T06:45:57Z [verbose] Readiness Indicator file check\\\\n2025-12-13T06:46:42Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c4t56\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lbk25\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.820146 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48240f19-087e-4597-b448-ab1a190a5027\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6ae5af8148e2cfed5d1fa0e69b95c9788d7c1e55c450f663edbaae7ee87fcc2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xxg7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-45tj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.836730 4644 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.846225 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.856013 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.864914 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.872628 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.879283 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.879329 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.879339 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.879357 4644 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.879368 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:54Z","lastTransitionTime":"2025-12-13T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.881877 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b95dba08-4180-4513-a763-1c3ac04e7090\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ba213140d27e158a4b4c01e52ec0c38eccb105e72cc056f701466bee3468e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed50bb2ba7b3449256f5818ce0e9e611eb5a418b21f3b7111ccc0f16eae563d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\
",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9zcmb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.890757 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c88wl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae945e75-99b5-40b6-851d-dc9348056cdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:10Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c88wl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.901800 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.912292 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.922026 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.932499 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\
"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:54Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.981651 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.981706 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.981715 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.981732 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:54 crc kubenswrapper[4644]: I1213 06:46:54.981741 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:54Z","lastTransitionTime":"2025-12-13T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.084903 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.084941 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.084951 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.084966 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.084977 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:55Z","lastTransitionTime":"2025-12-13T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.188080 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.188133 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.188143 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.188160 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.188189 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:55Z","lastTransitionTime":"2025-12-13T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.290830 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.290894 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.290906 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.290921 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.290931 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:55Z","lastTransitionTime":"2025-12-13T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.388807 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.388866 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.388821 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:55 crc kubenswrapper[4644]: E1213 06:46:55.388966 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:55 crc kubenswrapper[4644]: E1213 06:46:55.389010 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:55 crc kubenswrapper[4644]: E1213 06:46:55.389068 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.393416 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.393481 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.393495 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.393511 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.393522 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:55Z","lastTransitionTime":"2025-12-13T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.495666 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.495735 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.495744 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.495759 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.495771 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:55Z","lastTransitionTime":"2025-12-13T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.598275 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.598320 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.598330 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.598344 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.598354 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:55Z","lastTransitionTime":"2025-12-13T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.701255 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.701306 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.701315 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.701332 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.701342 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:55Z","lastTransitionTime":"2025-12-13T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.803747 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.803811 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.803822 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.803839 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.803850 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:55Z","lastTransitionTime":"2025-12-13T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.906551 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.906613 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.906625 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.906640 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:55 crc kubenswrapper[4644]: I1213 06:46:55.906650 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:55Z","lastTransitionTime":"2025-12-13T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.009531 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.009582 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.009607 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.009628 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.009639 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:56Z","lastTransitionTime":"2025-12-13T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.112489 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.112532 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.112541 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.112556 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.112566 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:56Z","lastTransitionTime":"2025-12-13T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.214976 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.215018 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.215030 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.215047 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.215059 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:56Z","lastTransitionTime":"2025-12-13T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.317427 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.317498 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.317508 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.317523 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.317536 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:56Z","lastTransitionTime":"2025-12-13T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.389365 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:56 crc kubenswrapper[4644]: E1213 06:46:56.389607 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.419526 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.419566 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.419576 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.419605 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.419614 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:56Z","lastTransitionTime":"2025-12-13T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.521975 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.522025 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.522034 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.522053 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.522063 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:56Z","lastTransitionTime":"2025-12-13T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.623988 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.624035 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.624044 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.624061 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.624071 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:56Z","lastTransitionTime":"2025-12-13T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.725934 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.725974 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.725983 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.725997 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.726007 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:56Z","lastTransitionTime":"2025-12-13T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.828651 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.828695 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.828704 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.828733 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.828745 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:56Z","lastTransitionTime":"2025-12-13T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.931126 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.931175 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.931186 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.931203 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:56 crc kubenswrapper[4644]: I1213 06:46:56.931214 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:56Z","lastTransitionTime":"2025-12-13T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.033838 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.033884 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.033895 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.033913 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.033924 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:57Z","lastTransitionTime":"2025-12-13T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.140881 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.140924 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.140934 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.140950 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.140959 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:57Z","lastTransitionTime":"2025-12-13T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.243227 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.243281 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.243293 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.243308 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.243319 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:57Z","lastTransitionTime":"2025-12-13T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.346073 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.346137 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.346147 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.346162 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.346173 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:57Z","lastTransitionTime":"2025-12-13T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.388339 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.388424 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.388367 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:57 crc kubenswrapper[4644]: E1213 06:46:57.388528 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:57 crc kubenswrapper[4644]: E1213 06:46:57.388670 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:57 crc kubenswrapper[4644]: E1213 06:46:57.388756 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.448633 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.448683 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.448693 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.448710 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.448719 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:57Z","lastTransitionTime":"2025-12-13T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.551962 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.552008 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.552017 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.552033 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.552043 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:57Z","lastTransitionTime":"2025-12-13T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.654314 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.654357 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.654366 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.654383 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.654394 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:57Z","lastTransitionTime":"2025-12-13T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.756151 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.756187 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.756197 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.756210 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.756220 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:57Z","lastTransitionTime":"2025-12-13T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.858064 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.858103 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.858112 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.858128 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.858141 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:57Z","lastTransitionTime":"2025-12-13T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.960465 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.960509 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.960518 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.960535 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:57 crc kubenswrapper[4644]: I1213 06:46:57.960547 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:57Z","lastTransitionTime":"2025-12-13T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.062555 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.062613 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.062622 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.062637 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.062648 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:58Z","lastTransitionTime":"2025-12-13T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.169082 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.169131 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.169141 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.169155 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.169166 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:58Z","lastTransitionTime":"2025-12-13T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.271907 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.271954 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.271965 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.271982 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.271992 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:58Z","lastTransitionTime":"2025-12-13T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.374204 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.374250 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.374260 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.374277 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.374286 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:58Z","lastTransitionTime":"2025-12-13T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.388688 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:46:58 crc kubenswrapper[4644]: E1213 06:46:58.388836 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.405880 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"851aace1-6ecb-4e97-84cf-d8dd9a2f314f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5200b290ecb170c8cef17e46ab1a544eb75e56bee246b10fe4a689351cc0d605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0abbef2c0095f74187c148cbcc7db1edc87a683e29ae3033c4f0e7d348d8d5f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb50b0529ea6ccd631a3cdc0f289762e6ce2e0908a7be00c3f1fca337f8515ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f72e1871d41e40a17754d422900e0dd6d464124d6353b798489bdf658a9daf3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://346554be141a73748792f9c5c8e3e0d3e0f80b9315649b18ff39cd577adf1e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd0f2f0ea22cee58a88e92ada73887245b73f1fd50755ce5a4c54437c49529b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68b2400ae996eecaddd9fa2b05d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff2627643e27f7fd51ab9c72449e0f484cb68
b2400ae996eecaddd9fa2b05d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d746e66b9c7f9d7c9fe7cab23ac0eea54956609c19cb3f700aa07f38bee8e63e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.416540 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559cab05-6a03-471f-9acb-4336551435f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d1c809b1b4b7f93f1f4aa9c451d4cf5fde618ec502b3bc46dfeef6b0917691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d71797e6174106d307f800d2b4469d694f331a315e97e0d6808ddb0fe672be\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1609992bde1fe2ea585007611e37815504dcf15eee709d34ce167429083965ec\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.428158 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.436216 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wvvsb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc018066-67ca-4e62-a670-18812ca830a7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1f1408307faa20f570ed5ac316cedf7af254774e440e4d6c3ec89048e142f4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2z58d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:56Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wvvsb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:58Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.445512 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-6lkrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5997b569-2b35-4c1c-bcdd-2d89d9beeefe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://921f7e2c5a0a5c97b2f766cf279ede0b0fab6d04a19f15e4ec1f71bf1d14974c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7blsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-6lkrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:58Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.454309 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b95dba08-4180-4513-a763-1c3ac04e7090\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7ba213140d27e158a4b4c01e52ec0c38eccb105e72cc056f701466bee3468e3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ed50bb2ba7b3449256f5818ce0e9e611eb5a418b21f3b7111ccc0f16eae563d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqx2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9zcmb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:58Z is after 2025-08-24T17:21:41Z"
Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.463330 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c88wl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae945e75-99b5-40b6-851d-dc9348056cdb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t97gb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:46:10Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c88wl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:58Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"le observer\\\\nW1213 06:45:55.197937 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1213 06:45:55.198179 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 06:45:55.200676 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1450937542/tls.crt::/tmp/serving-cert-1450937542/tls.key\\\\\\\"\\\\nI1213 06:45:55.363082 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 06:45:55.368543 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 06:45:55.368566 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 06:45:55.368586 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 06:45:55.368591 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 06:45:55.381769 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 06:45:55.381797 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381802 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 06:45:55.381805 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 06:45:55.381808 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 06:45:55.381810 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 06:45:55.381813 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 06:45:55.381955 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 06:45:55.384866 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:40Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.476052 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.476089 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.476100 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.476117 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.476126 4644 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:58Z","lastTransitionTime":"2025-12-13T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.486687 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d2fbb151e5548aff88978daf528a0369f1e527e9e553d4aa42f2b9ccfa381957\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.495739 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.504805 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f71bd71eb73ab98dc83a957d4497775d30ed7f0ab0c168dd2ba94ec712a6bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4992c93fe335ef1c8992510acf5c75ac089861788614aa8f7080a5bf4444ef3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.514871 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c95f284b-0acb-4663-966d-e8784aba4593\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:46:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f7cf602b6144f7e94214304ab850074ba871148dc0165f21cb9a8cfba8d06c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc0d51af45093a810195e31aa1e43043ebc2c967fb2f2cc68683a6863d72e889\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c421321fe13141866ef6b2747c48845a897a7ea4238c16e92cbcd33fa2f787c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be12f052959ffd66fe1de8afd45069a0a237e5c23572954a41a268a1bcca5a52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:39Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:38Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.524133 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.537935 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:45:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff7d476b701faea9924351c3590853cdf0ac5104
547824a9479fc90b735ec012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T06:46:53Z\\\",\\\"message\\\":\\\" [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]} options:{GoMap:map[iface-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Lo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T06:46:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T06:46:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:45:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:45:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zlbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T06:45:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-bj6c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T06:46:58Z is after 2025-08-24T17:21:41Z" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.556879 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-ncsgn" podStartSLOduration=62.55686317 podStartE2EDuration="1m2.55686317s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:46:58.555378246 +0000 UTC m=+80.770329080" watchObservedRunningTime="2025-12-13 06:46:58.55686317 +0000 UTC m=+80.771814002" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.578063 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.578122 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.578132 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.578147 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.578157 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:58Z","lastTransitionTime":"2025-12-13T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
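[editor's note] Every status patch in this stretch fails the same way: the pod.network-node-identity.openshift.io webhook's serving certificate expired on 2025-08-24T17:21:41Z, and the TLS handshake to https://127.0.0.1:9743 dies before any patch is attempted. The rejection is a plain wall-clock comparison in Go's x509 verifier. A minimal sketch of that check, using only the standard library and a synthetic certificate generated for the demonstration (nothing here is the webhook's real cert or key):

package main

import (
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"crypto/x509"
	"crypto/x509/pkix"
	"fmt"
	"math/big"
	"time"
)

func main() {
	// Build a self-signed certificate whose validity window has already
	// closed, standing in for the expired webhook serving cert.
	key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	if err != nil {
		panic(err)
	}
	tmpl := &x509.Certificate{
		SerialNumber: big.NewInt(1),
		Subject:      pkix.Name{CommonName: "expired-webhook-example"},
		NotBefore:    time.Now().Add(-48 * time.Hour),
		NotAfter:     time.Now().Add(-24 * time.Hour), // expired a day ago
	}
	der, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
	if err != nil {
		panic(err)
	}
	cert, err := x509.ParseCertificate(der)
	if err != nil {
		panic(err)
	}

	// Verify checks NotBefore/NotAfter against VerifyOptions.CurrentTime
	// (time.Now() by default) before building any chain, so an expired
	// leaf fails immediately with CertificateInvalidError{Reason: Expired}.
	roots := x509.NewCertPool()
	roots.AddCert(cert)
	_, err = cert.Verify(x509.VerifyOptions{Roots: roots})
	fmt.Println(err)
	// Prints the same error class seen throughout this log:
	// "x509: certificate has expired or is not yet valid: ..."
}

Note that the webhook failures only block status patches; the NodeNotReady churn interleaved with them has a separate cause, picked up below.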
Has your network provider started?"} Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.584984 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-lbk25" podStartSLOduration=62.58496945 podStartE2EDuration="1m2.58496945s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:46:58.57538763 +0000 UTC m=+80.790338463" watchObservedRunningTime="2025-12-13 06:46:58.58496945 +0000 UTC m=+80.799920283" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.680337 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.680386 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.680398 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.680417 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.680427 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:58Z","lastTransitionTime":"2025-12-13T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.782927 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.782973 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.782982 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.782996 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.783005 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:58Z","lastTransitionTime":"2025-12-13T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.885426 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.885492 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.885503 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.885520 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.885533 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:58Z","lastTransitionTime":"2025-12-13T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.987421 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.987483 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.987493 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.987507 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:58 crc kubenswrapper[4644]: I1213 06:46:58.987516 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:58Z","lastTransitionTime":"2025-12-13T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.089682 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.089730 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.089739 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.089756 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.089766 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:59Z","lastTransitionTime":"2025-12-13T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
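[editor's note] The node keeps flapping to NotReady for the reason spelled out in the condition message: the container runtime cannot find any CNI configuration, which is consistent with the ovnkube-controller CrashLoopBackOff recorded above (that container is what eventually writes the config). A rough approximation of the readiness gate behind "NetworkPluginNotReady", assuming the directory named in the log; the glob patterns are illustrative, not the runtime's exact logic:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Path copied from the log message; this is the CNI conf dir the
	// runtime on this node was configured with.
	confDir := "/etc/kubernetes/cni/net.d"

	// The runtime reports NetworkReady=true only once at least one CNI
	// config file exists here; until then the kubelet repeats
	// "Has your network provider started?".
	var configs []string
	for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(confDir, pattern))
		if err != nil {
			panic(err) // only fires on a malformed pattern
		}
		configs = append(configs, matches...)
	}
	if len(configs) == 0 {
		fmt.Printf("network not ready: no CNI configuration file in %s\n", confDir)
		os.Exit(1)
	}
	fmt.Println("CNI configs found:", configs)
}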
Has your network provider started?"} Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.191726 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.191766 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.191775 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.191791 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.191802 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:59Z","lastTransitionTime":"2025-12-13T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.294369 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.294413 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.294422 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.294456 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.294468 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:59Z","lastTransitionTime":"2025-12-13T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.341815 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:46:59 crc kubenswrapper[4644]: E1213 06:46:59.341940 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:48:03.341917594 +0000 UTC m=+145.556868428 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.342041 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.342072 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.342102 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.342126 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:59 crc kubenswrapper[4644]: E1213 06:46:59.342208 4644 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 06:46:59 crc kubenswrapper[4644]: E1213 06:46:59.342250 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 06:48:03.342242013 +0000 UTC m=+145.557192857 (durationBeforeRetry 1m4s). 
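[editor's note] The PVC teardown failure just above is a registration problem rather than a storage problem: the kubelet looks the driver name up in its list of registered CSI plugins, and kubevirt.io.hostpath-provisioner is not there (the driver pod presumably has not come back up on this network-less node). A small sketch of inspecting what is registered, assuming the stock kubelet plugins_registry path, where CSI drivers drop a unix socket to register; CRC may place it elsewhere:

package main

import (
	"fmt"
	"os"
)

func main() {
	// The kubelet plugin watcher discovers CSI drivers through sockets
	// in this directory; "driver name ... not found in the list of
	// registered CSI drivers" means no live registration for that name.
	const regDir = "/var/lib/kubelet/plugins_registry"

	entries, err := os.ReadDir(regDir)
	if err != nil {
		fmt.Println("cannot read registration dir:", err)
		os.Exit(1)
	}
	if len(entries) == 0 {
		fmt.Println("no CSI drivers registered yet")
		return
	}
	for _, e := range entries {
		fmt.Println("registered plugin socket:", e.Name())
	}
}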
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 06:46:59 crc kubenswrapper[4644]: E1213 06:46:59.342208 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 06:46:59 crc kubenswrapper[4644]: E1213 06:46:59.342318 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 06:46:59 crc kubenswrapper[4644]: E1213 06:46:59.342332 4644 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:46:59 crc kubenswrapper[4644]: E1213 06:46:59.342365 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 06:48:03.342354925 +0000 UTC m=+145.557305758 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:46:59 crc kubenswrapper[4644]: E1213 06:46:59.342208 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 06:46:59 crc kubenswrapper[4644]: E1213 06:46:59.342389 4644 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 06:46:59 crc kubenswrapper[4644]: E1213 06:46:59.342398 4644 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:46:59 crc kubenswrapper[4644]: E1213 06:46:59.342424 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 06:48:03.342416921 +0000 UTC m=+145.557367754 (durationBeforeRetry 1m4s). 
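[editor's note] All of these mount/unmount retries land 1m4s out, which is consistent with the kubelet's exponential backoff for volume operations: 64s is a doubling sequence from 500ms after eight consecutive failures. A sketch of that schedule; the constants are assumptions based on upstream defaults, not values read from this cluster:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Assumed kubelet-style backoff: start at 500ms, double per failure,
	// cap the wait. Exact constants may differ by version.
	const (
		initialWait = 500 * time.Millisecond
		factor      = 2
		maxWait     = 2*time.Minute + 2*time.Second
	)
	d := initialWait
	for attempt := 1; attempt <= 10; attempt++ {
		fmt.Printf("failure %2d -> wait %v before retry\n", attempt, d)
		d *= factor
		if d > maxWait {
			d = maxWait
		}
	}
	// failure 8 -> wait 1m4s, matching the durationBeforeRetry above.
}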
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 06:46:59 crc kubenswrapper[4644]: E1213 06:46:59.342288 4644 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 06:46:59 crc kubenswrapper[4644]: E1213 06:46:59.342491 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 06:48:03.342482734 +0000 UTC m=+145.557433568 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.389021 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.389076 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.389122 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:46:59 crc kubenswrapper[4644]: E1213 06:46:59.389153 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:46:59 crc kubenswrapper[4644]: E1213 06:46:59.389263 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:46:59 crc kubenswrapper[4644]: E1213 06:46:59.389333 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.397242 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.397269 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.397277 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.397290 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.397299 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:59Z","lastTransitionTime":"2025-12-13T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.499819 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.499858 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.499867 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.499884 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.499896 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:59Z","lastTransitionTime":"2025-12-13T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.602329 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.602375 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.602383 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.602402 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.602412 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:59Z","lastTransitionTime":"2025-12-13T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.704466 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.704719 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.704731 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.704749 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.704761 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:59Z","lastTransitionTime":"2025-12-13T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.807189 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.807235 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.807249 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.807267 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.807278 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:59Z","lastTransitionTime":"2025-12-13T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.909117 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.909166 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.909176 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.909192 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:46:59 crc kubenswrapper[4644]: I1213 06:46:59.909201 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:46:59Z","lastTransitionTime":"2025-12-13T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.011468 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.011505 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.011515 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.011531 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.011540 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:47:00Z","lastTransitionTime":"2025-12-13T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.081080 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.081136 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.081146 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.081159 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.081170 4644 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T06:47:00Z","lastTransitionTime":"2025-12-13T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.115039 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podStartSLOduration=64.115018656 podStartE2EDuration="1m4.115018656s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:46:58.585160147 +0000 UTC m=+80.800110980" watchObservedRunningTime="2025-12-13 06:47:00.115018656 +0000 UTC m=+82.329969489" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.115395 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn"] Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.115854 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.117182 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.117699 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.118494 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.118965 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.131888 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=34.131871393 podStartE2EDuration="34.131871393s" podCreationTimestamp="2025-12-13 06:46:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:00.131642905 +0000 UTC m=+82.346593738" watchObservedRunningTime="2025-12-13 06:47:00.131871393 +0000 UTC m=+82.346822226" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.149640 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/57bdba4f-14a9-4421-907c-ef3e62a4c4e3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-24sxn\" (UID: \"57bdba4f-14a9-4421-907c-ef3e62a4c4e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.149689 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/57bdba4f-14a9-4421-907c-ef3e62a4c4e3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-24sxn\" (UID: \"57bdba4f-14a9-4421-907c-ef3e62a4c4e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.149757 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/57bdba4f-14a9-4421-907c-ef3e62a4c4e3-etc-ssl-certs\") pod 
\"cluster-version-operator-5c965bbfc6-24sxn\" (UID: \"57bdba4f-14a9-4421-907c-ef3e62a4c4e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.149859 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/57bdba4f-14a9-4421-907c-ef3e62a4c4e3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-24sxn\" (UID: \"57bdba4f-14a9-4421-907c-ef3e62a4c4e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.173520 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-6lkrr" podStartSLOduration=64.173494497 podStartE2EDuration="1m4.173494497s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:00.173050295 +0000 UTC m=+82.388001129" watchObservedRunningTime="2025-12-13 06:47:00.173494497 +0000 UTC m=+82.388445331" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.183459 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9zcmb" podStartSLOduration=64.183417346 podStartE2EDuration="1m4.183417346s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:00.183311277 +0000 UTC m=+82.398262110" watchObservedRunningTime="2025-12-13 06:47:00.183417346 +0000 UTC m=+82.398368169" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.225497 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=61.225475496 podStartE2EDuration="1m1.225475496s" podCreationTimestamp="2025-12-13 06:45:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:00.212764562 +0000 UTC m=+82.427715395" watchObservedRunningTime="2025-12-13 06:47:00.225475496 +0000 UTC m=+82.440426329" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.226358 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=59.226337973 podStartE2EDuration="59.226337973s" podCreationTimestamp="2025-12-13 06:46:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:00.225156097 +0000 UTC m=+82.440106930" watchObservedRunningTime="2025-12-13 06:47:00.226337973 +0000 UTC m=+82.441288806" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.250853 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/57bdba4f-14a9-4421-907c-ef3e62a4c4e3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-24sxn\" (UID: \"57bdba4f-14a9-4421-907c-ef3e62a4c4e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.250926 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: 
\"kubernetes.io/host-path/57bdba4f-14a9-4421-907c-ef3e62a4c4e3-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-24sxn\" (UID: \"57bdba4f-14a9-4421-907c-ef3e62a4c4e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.250959 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/57bdba4f-14a9-4421-907c-ef3e62a4c4e3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-24sxn\" (UID: \"57bdba4f-14a9-4421-907c-ef3e62a4c4e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.250999 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/57bdba4f-14a9-4421-907c-ef3e62a4c4e3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-24sxn\" (UID: \"57bdba4f-14a9-4421-907c-ef3e62a4c4e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.251024 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/57bdba4f-14a9-4421-907c-ef3e62a4c4e3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-24sxn\" (UID: \"57bdba4f-14a9-4421-907c-ef3e62a4c4e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.251086 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/57bdba4f-14a9-4421-907c-ef3e62a4c4e3-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-24sxn\" (UID: \"57bdba4f-14a9-4421-907c-ef3e62a4c4e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.251361 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/57bdba4f-14a9-4421-907c-ef3e62a4c4e3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-24sxn\" (UID: \"57bdba4f-14a9-4421-907c-ef3e62a4c4e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.252029 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/57bdba4f-14a9-4421-907c-ef3e62a4c4e3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-24sxn\" (UID: \"57bdba4f-14a9-4421-907c-ef3e62a4c4e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.252973 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-wvvsb" podStartSLOduration=64.252944612 podStartE2EDuration="1m4.252944612s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:00.252241234 +0000 UTC m=+82.467192067" watchObservedRunningTime="2025-12-13 06:47:00.252944612 +0000 UTC m=+82.467895445" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.259911 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/57bdba4f-14a9-4421-907c-ef3e62a4c4e3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-24sxn\" (UID: \"57bdba4f-14a9-4421-907c-ef3e62a4c4e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.276612 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=65.276550276 podStartE2EDuration="1m5.276550276s" podCreationTimestamp="2025-12-13 06:45:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:00.264919757 +0000 UTC m=+82.479870589" watchObservedRunningTime="2025-12-13 06:47:00.276550276 +0000 UTC m=+82.491501109" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.352065 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/57bdba4f-14a9-4421-907c-ef3e62a4c4e3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-24sxn\" (UID: \"57bdba4f-14a9-4421-907c-ef3e62a4c4e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.366336 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/57bdba4f-14a9-4421-907c-ef3e62a4c4e3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-24sxn\" (UID: \"57bdba4f-14a9-4421-907c-ef3e62a4c4e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.389309 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:00 crc kubenswrapper[4644]: E1213 06:47:00.389480 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.427885 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.759630 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" event={"ID":"57bdba4f-14a9-4421-907c-ef3e62a4c4e3","Type":"ContainerStarted","Data":"c04cfa7228c69b27bbfcf653071a5c416f1feb9146466d08f469348f0b9752ef"} Dec 13 06:47:00 crc kubenswrapper[4644]: I1213 06:47:00.759923 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" event={"ID":"57bdba4f-14a9-4421-907c-ef3e62a4c4e3","Type":"ContainerStarted","Data":"561d0287fb3c28fc73da9084c67cf55d78e97951242c75b1a694458d7946510d"} Dec 13 06:47:01 crc kubenswrapper[4644]: I1213 06:47:01.389114 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:01 crc kubenswrapper[4644]: I1213 06:47:01.389152 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:01 crc kubenswrapper[4644]: I1213 06:47:01.389219 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:01 crc kubenswrapper[4644]: E1213 06:47:01.389266 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:01 crc kubenswrapper[4644]: E1213 06:47:01.389395 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:01 crc kubenswrapper[4644]: E1213 06:47:01.389489 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:02 crc kubenswrapper[4644]: I1213 06:47:02.388651 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:02 crc kubenswrapper[4644]: E1213 06:47:02.388969 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:03 crc kubenswrapper[4644]: I1213 06:47:03.388593 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:03 crc kubenswrapper[4644]: I1213 06:47:03.388718 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:03 crc kubenswrapper[4644]: E1213 06:47:03.388881 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:03 crc kubenswrapper[4644]: E1213 06:47:03.389012 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:03 crc kubenswrapper[4644]: I1213 06:47:03.389206 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:03 crc kubenswrapper[4644]: E1213 06:47:03.389380 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:03 crc kubenswrapper[4644]: I1213 06:47:03.399420 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-24sxn" podStartSLOduration=67.399405184 podStartE2EDuration="1m7.399405184s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:00.775201132 +0000 UTC m=+82.990151966" watchObservedRunningTime="2025-12-13 06:47:03.399405184 +0000 UTC m=+85.614356017" Dec 13 06:47:03 crc kubenswrapper[4644]: I1213 06:47:03.400214 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 13 06:47:04 crc kubenswrapper[4644]: I1213 06:47:04.388730 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:04 crc kubenswrapper[4644]: E1213 06:47:04.388887 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:05 crc kubenswrapper[4644]: I1213 06:47:05.388496 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:05 crc kubenswrapper[4644]: I1213 06:47:05.388561 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:05 crc kubenswrapper[4644]: I1213 06:47:05.388528 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:05 crc kubenswrapper[4644]: E1213 06:47:05.388675 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:05 crc kubenswrapper[4644]: E1213 06:47:05.388781 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:05 crc kubenswrapper[4644]: E1213 06:47:05.388874 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:06 crc kubenswrapper[4644]: I1213 06:47:06.388785 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:06 crc kubenswrapper[4644]: E1213 06:47:06.388932 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:07 crc kubenswrapper[4644]: I1213 06:47:07.388722 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:07 crc kubenswrapper[4644]: I1213 06:47:07.388756 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:07 crc kubenswrapper[4644]: E1213 06:47:07.388848 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:07 crc kubenswrapper[4644]: E1213 06:47:07.388955 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:07 crc kubenswrapper[4644]: I1213 06:47:07.388729 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:07 crc kubenswrapper[4644]: E1213 06:47:07.389039 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:08 crc kubenswrapper[4644]: I1213 06:47:08.389168 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:08 crc kubenswrapper[4644]: E1213 06:47:08.390043 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:08 crc kubenswrapper[4644]: I1213 06:47:08.399760 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=5.399743989 podStartE2EDuration="5.399743989s" podCreationTimestamp="2025-12-13 06:47:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:08.399080565 +0000 UTC m=+90.614031398" watchObservedRunningTime="2025-12-13 06:47:08.399743989 +0000 UTC m=+90.614694822" Dec 13 06:47:09 crc kubenswrapper[4644]: I1213 06:47:09.388915 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:09 crc kubenswrapper[4644]: I1213 06:47:09.388938 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:09 crc kubenswrapper[4644]: E1213 06:47:09.389049 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:09 crc kubenswrapper[4644]: I1213 06:47:09.389100 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:09 crc kubenswrapper[4644]: E1213 06:47:09.389155 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:09 crc kubenswrapper[4644]: E1213 06:47:09.389321 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:09 crc kubenswrapper[4644]: I1213 06:47:09.389993 4644 scope.go:117] "RemoveContainer" containerID="ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012" Dec 13 06:47:09 crc kubenswrapper[4644]: E1213 06:47:09.390133 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" Dec 13 06:47:10 crc kubenswrapper[4644]: I1213 06:47:10.388670 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:10 crc kubenswrapper[4644]: E1213 06:47:10.388807 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:11 crc kubenswrapper[4644]: I1213 06:47:11.389102 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:11 crc kubenswrapper[4644]: I1213 06:47:11.389168 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:11 crc kubenswrapper[4644]: E1213 06:47:11.389214 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:11 crc kubenswrapper[4644]: I1213 06:47:11.389103 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:11 crc kubenswrapper[4644]: E1213 06:47:11.389279 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:11 crc kubenswrapper[4644]: E1213 06:47:11.389311 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:12 crc kubenswrapper[4644]: I1213 06:47:12.388945 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:12 crc kubenswrapper[4644]: E1213 06:47:12.389092 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:13 crc kubenswrapper[4644]: I1213 06:47:13.388247 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:13 crc kubenswrapper[4644]: I1213 06:47:13.388379 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:13 crc kubenswrapper[4644]: E1213 06:47:13.388518 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:13 crc kubenswrapper[4644]: I1213 06:47:13.388547 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:13 crc kubenswrapper[4644]: E1213 06:47:13.388715 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:13 crc kubenswrapper[4644]: E1213 06:47:13.389018 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:14 crc kubenswrapper[4644]: I1213 06:47:14.388740 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:14 crc kubenswrapper[4644]: E1213 06:47:14.388885 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:14 crc kubenswrapper[4644]: I1213 06:47:14.400633 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs\") pod \"network-metrics-daemon-c88wl\" (UID: \"ae945e75-99b5-40b6-851d-dc9348056cdb\") " pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:14 crc kubenswrapper[4644]: E1213 06:47:14.400768 4644 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 06:47:14 crc kubenswrapper[4644]: E1213 06:47:14.400843 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs podName:ae945e75-99b5-40b6-851d-dc9348056cdb nodeName:}" failed. No retries permitted until 2025-12-13 06:48:18.400826346 +0000 UTC m=+160.615777179 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs") pod "network-metrics-daemon-c88wl" (UID: "ae945e75-99b5-40b6-851d-dc9348056cdb") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 06:47:15 crc kubenswrapper[4644]: I1213 06:47:15.389049 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:15 crc kubenswrapper[4644]: I1213 06:47:15.389094 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:15 crc kubenswrapper[4644]: E1213 06:47:15.389154 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:15 crc kubenswrapper[4644]: I1213 06:47:15.389231 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:15 crc kubenswrapper[4644]: E1213 06:47:15.389237 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:15 crc kubenswrapper[4644]: E1213 06:47:15.389386 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:16 crc kubenswrapper[4644]: I1213 06:47:16.389272 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:16 crc kubenswrapper[4644]: E1213 06:47:16.389467 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:17 crc kubenswrapper[4644]: I1213 06:47:17.388601 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:17 crc kubenswrapper[4644]: I1213 06:47:17.388610 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:17 crc kubenswrapper[4644]: I1213 06:47:17.388752 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:17 crc kubenswrapper[4644]: E1213 06:47:17.388907 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:17 crc kubenswrapper[4644]: E1213 06:47:17.389026 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:17 crc kubenswrapper[4644]: E1213 06:47:17.389157 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:18 crc kubenswrapper[4644]: I1213 06:47:18.389127 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:18 crc kubenswrapper[4644]: E1213 06:47:18.390200 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:19 crc kubenswrapper[4644]: I1213 06:47:19.388912 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:19 crc kubenswrapper[4644]: I1213 06:47:19.388964 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:19 crc kubenswrapper[4644]: I1213 06:47:19.389018 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:19 crc kubenswrapper[4644]: E1213 06:47:19.389051 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:19 crc kubenswrapper[4644]: E1213 06:47:19.389151 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:19 crc kubenswrapper[4644]: E1213 06:47:19.389227 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:20 crc kubenswrapper[4644]: I1213 06:47:20.388720 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:20 crc kubenswrapper[4644]: E1213 06:47:20.388841 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:21 crc kubenswrapper[4644]: I1213 06:47:21.389164 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:21 crc kubenswrapper[4644]: I1213 06:47:21.389197 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:21 crc kubenswrapper[4644]: I1213 06:47:21.389279 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:21 crc kubenswrapper[4644]: E1213 06:47:21.389349 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:21 crc kubenswrapper[4644]: E1213 06:47:21.389506 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:21 crc kubenswrapper[4644]: E1213 06:47:21.389630 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:22 crc kubenswrapper[4644]: I1213 06:47:22.389415 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:22 crc kubenswrapper[4644]: E1213 06:47:22.389656 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:23 crc kubenswrapper[4644]: I1213 06:47:23.388364 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:23 crc kubenswrapper[4644]: I1213 06:47:23.388412 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:23 crc kubenswrapper[4644]: I1213 06:47:23.388563 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:23 crc kubenswrapper[4644]: E1213 06:47:23.388844 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:23 crc kubenswrapper[4644]: E1213 06:47:23.388971 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:23 crc kubenswrapper[4644]: E1213 06:47:23.389018 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:23 crc kubenswrapper[4644]: I1213 06:47:23.389112 4644 scope.go:117] "RemoveContainer" containerID="ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012" Dec 13 06:47:23 crc kubenswrapper[4644]: E1213 06:47:23.389369 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-bj6c2_openshift-ovn-kubernetes(03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" Dec 13 06:47:24 crc kubenswrapper[4644]: I1213 06:47:24.388418 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:24 crc kubenswrapper[4644]: E1213 06:47:24.388555 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:25 crc kubenswrapper[4644]: I1213 06:47:25.388283 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:25 crc kubenswrapper[4644]: I1213 06:47:25.388334 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:25 crc kubenswrapper[4644]: I1213 06:47:25.388283 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:25 crc kubenswrapper[4644]: E1213 06:47:25.388419 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:25 crc kubenswrapper[4644]: E1213 06:47:25.388521 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:25 crc kubenswrapper[4644]: E1213 06:47:25.388587 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:26 crc kubenswrapper[4644]: I1213 06:47:26.388755 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:26 crc kubenswrapper[4644]: E1213 06:47:26.388906 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:27 crc kubenswrapper[4644]: I1213 06:47:27.389190 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:27 crc kubenswrapper[4644]: I1213 06:47:27.389190 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:27 crc kubenswrapper[4644]: I1213 06:47:27.389963 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:27 crc kubenswrapper[4644]: E1213 06:47:27.390090 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:27 crc kubenswrapper[4644]: E1213 06:47:27.390252 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:27 crc kubenswrapper[4644]: E1213 06:47:27.390441 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:28 crc kubenswrapper[4644]: I1213 06:47:28.389170 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:28 crc kubenswrapper[4644]: E1213 06:47:28.389996 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:29 crc kubenswrapper[4644]: I1213 06:47:29.388745 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:29 crc kubenswrapper[4644]: I1213 06:47:29.388791 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:29 crc kubenswrapper[4644]: I1213 06:47:29.388911 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:29 crc kubenswrapper[4644]: E1213 06:47:29.388916 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:29 crc kubenswrapper[4644]: E1213 06:47:29.389026 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:29 crc kubenswrapper[4644]: E1213 06:47:29.389101 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:29 crc kubenswrapper[4644]: I1213 06:47:29.839518 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lbk25_9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd/kube-multus/1.log" Dec 13 06:47:29 crc kubenswrapper[4644]: I1213 06:47:29.839975 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lbk25_9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd/kube-multus/0.log" Dec 13 06:47:29 crc kubenswrapper[4644]: I1213 06:47:29.840026 4644 generic.go:334] "Generic (PLEG): container finished" podID="9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd" containerID="68f72e73eb6fea5f57ed95c448c349e23ad92277af62f67c45217ef0c03e07e1" exitCode=1 Dec 13 06:47:29 crc kubenswrapper[4644]: I1213 06:47:29.840069 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lbk25" event={"ID":"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd","Type":"ContainerDied","Data":"68f72e73eb6fea5f57ed95c448c349e23ad92277af62f67c45217ef0c03e07e1"} Dec 13 06:47:29 crc kubenswrapper[4644]: I1213 06:47:29.840128 4644 scope.go:117] "RemoveContainer" containerID="f1987f15007998a596abf2c8898fbb70d60af9e5e86274ebc21526bd9ab81e43" Dec 13 06:47:29 crc kubenswrapper[4644]: I1213 06:47:29.840682 4644 scope.go:117] "RemoveContainer" containerID="68f72e73eb6fea5f57ed95c448c349e23ad92277af62f67c45217ef0c03e07e1" Dec 13 06:47:29 crc kubenswrapper[4644]: E1213 06:47:29.840897 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-lbk25_openshift-multus(9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd)\"" pod="openshift-multus/multus-lbk25" podUID="9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd" Dec 13 06:47:30 crc kubenswrapper[4644]: I1213 06:47:30.388872 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:30 crc kubenswrapper[4644]: E1213 06:47:30.389048 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:30 crc kubenswrapper[4644]: I1213 06:47:30.844987 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lbk25_9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd/kube-multus/1.log" Dec 13 06:47:31 crc kubenswrapper[4644]: I1213 06:47:31.389006 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:31 crc kubenswrapper[4644]: I1213 06:47:31.389044 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:31 crc kubenswrapper[4644]: I1213 06:47:31.389080 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:31 crc kubenswrapper[4644]: E1213 06:47:31.389163 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:31 crc kubenswrapper[4644]: E1213 06:47:31.389217 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:31 crc kubenswrapper[4644]: E1213 06:47:31.389327 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:32 crc kubenswrapper[4644]: I1213 06:47:32.388480 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:32 crc kubenswrapper[4644]: E1213 06:47:32.389202 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:33 crc kubenswrapper[4644]: I1213 06:47:33.388725 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:33 crc kubenswrapper[4644]: I1213 06:47:33.388760 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:33 crc kubenswrapper[4644]: E1213 06:47:33.388880 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:33 crc kubenswrapper[4644]: E1213 06:47:33.388966 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:33 crc kubenswrapper[4644]: I1213 06:47:33.388982 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:33 crc kubenswrapper[4644]: E1213 06:47:33.389160 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:34 crc kubenswrapper[4644]: I1213 06:47:34.388811 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:34 crc kubenswrapper[4644]: E1213 06:47:34.388981 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:35 crc kubenswrapper[4644]: I1213 06:47:35.389031 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:35 crc kubenswrapper[4644]: I1213 06:47:35.389109 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:35 crc kubenswrapper[4644]: E1213 06:47:35.389238 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:35 crc kubenswrapper[4644]: I1213 06:47:35.389250 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:35 crc kubenswrapper[4644]: E1213 06:47:35.389527 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:35 crc kubenswrapper[4644]: E1213 06:47:35.389603 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:35 crc kubenswrapper[4644]: I1213 06:47:35.390242 4644 scope.go:117] "RemoveContainer" containerID="ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012" Dec 13 06:47:35 crc kubenswrapper[4644]: I1213 06:47:35.861191 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovnkube-controller/3.log" Dec 13 06:47:35 crc kubenswrapper[4644]: I1213 06:47:35.863643 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerStarted","Data":"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b"} Dec 13 06:47:35 crc kubenswrapper[4644]: I1213 06:47:35.864090 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:47:35 crc kubenswrapper[4644]: I1213 06:47:35.887842 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podStartSLOduration=99.887826904 podStartE2EDuration="1m39.887826904s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:35.887212423 +0000 UTC m=+118.102163256" watchObservedRunningTime="2025-12-13 06:47:35.887826904 +0000 UTC m=+118.102777737" Dec 13 06:47:36 crc kubenswrapper[4644]: I1213 06:47:36.041205 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-c88wl"] Dec 13 06:47:36 crc kubenswrapper[4644]: I1213 06:47:36.041337 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:36 crc kubenswrapper[4644]: E1213 06:47:36.041472 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:37 crc kubenswrapper[4644]: I1213 06:47:37.388870 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:37 crc kubenswrapper[4644]: I1213 06:47:37.388901 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:37 crc kubenswrapper[4644]: E1213 06:47:37.389274 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:37 crc kubenswrapper[4644]: E1213 06:47:37.389402 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:37 crc kubenswrapper[4644]: I1213 06:47:37.388913 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:37 crc kubenswrapper[4644]: E1213 06:47:37.389551 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:38 crc kubenswrapper[4644]: I1213 06:47:38.388786 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:38 crc kubenswrapper[4644]: E1213 06:47:38.389576 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:38 crc kubenswrapper[4644]: E1213 06:47:38.439818 4644 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 13 06:47:38 crc kubenswrapper[4644]: E1213 06:47:38.467589 4644 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 13 06:47:39 crc kubenswrapper[4644]: I1213 06:47:39.389120 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:39 crc kubenswrapper[4644]: I1213 06:47:39.389131 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:39 crc kubenswrapper[4644]: I1213 06:47:39.389416 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:39 crc kubenswrapper[4644]: E1213 06:47:39.389258 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:39 crc kubenswrapper[4644]: E1213 06:47:39.389515 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:39 crc kubenswrapper[4644]: E1213 06:47:39.389598 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:40 crc kubenswrapper[4644]: I1213 06:47:40.388953 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:40 crc kubenswrapper[4644]: E1213 06:47:40.389099 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:40 crc kubenswrapper[4644]: I1213 06:47:40.389325 4644 scope.go:117] "RemoveContainer" containerID="68f72e73eb6fea5f57ed95c448c349e23ad92277af62f67c45217ef0c03e07e1" Dec 13 06:47:40 crc kubenswrapper[4644]: I1213 06:47:40.878437 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lbk25_9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd/kube-multus/1.log" Dec 13 06:47:40 crc kubenswrapper[4644]: I1213 06:47:40.878720 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lbk25" event={"ID":"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd","Type":"ContainerStarted","Data":"9d22616d01fe1e38e58ad0bf123ce6e9fe9fbedeccfd84b0cef0254f22ff59a8"} Dec 13 06:47:41 crc kubenswrapper[4644]: I1213 06:47:41.388682 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:41 crc kubenswrapper[4644]: I1213 06:47:41.388732 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:41 crc kubenswrapper[4644]: I1213 06:47:41.388733 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:41 crc kubenswrapper[4644]: E1213 06:47:41.388814 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:41 crc kubenswrapper[4644]: E1213 06:47:41.388864 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:41 crc kubenswrapper[4644]: E1213 06:47:41.388971 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:42 crc kubenswrapper[4644]: I1213 06:47:42.388877 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:42 crc kubenswrapper[4644]: E1213 06:47:42.389023 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c88wl" podUID="ae945e75-99b5-40b6-851d-dc9348056cdb" Dec 13 06:47:43 crc kubenswrapper[4644]: I1213 06:47:43.388505 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:43 crc kubenswrapper[4644]: I1213 06:47:43.388560 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:43 crc kubenswrapper[4644]: E1213 06:47:43.388750 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 06:47:43 crc kubenswrapper[4644]: I1213 06:47:43.388775 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:43 crc kubenswrapper[4644]: E1213 06:47:43.388832 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 06:47:43 crc kubenswrapper[4644]: E1213 06:47:43.388856 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 06:47:44 crc kubenswrapper[4644]: I1213 06:47:44.389124 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:47:44 crc kubenswrapper[4644]: I1213 06:47:44.391369 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 13 06:47:44 crc kubenswrapper[4644]: I1213 06:47:44.391387 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 13 06:47:45 crc kubenswrapper[4644]: I1213 06:47:45.388227 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:47:45 crc kubenswrapper[4644]: I1213 06:47:45.388276 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:47:45 crc kubenswrapper[4644]: I1213 06:47:45.388364 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:47:45 crc kubenswrapper[4644]: I1213 06:47:45.390056 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 13 06:47:45 crc kubenswrapper[4644]: I1213 06:47:45.390219 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 13 06:47:45 crc kubenswrapper[4644]: I1213 06:47:45.390365 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 13 06:47:45 crc kubenswrapper[4644]: I1213 06:47:45.391401 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.827880 4644 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.856507 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mh84f"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.856897 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.859058 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-jjpl8"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.859573 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.860213 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6xwbf"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.860521 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.862409 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.862617 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.862810 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.863707 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.868703 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.869040 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.870309 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8ktwh"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.870816 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.871147 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.871388 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.871599 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.871707 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.872951 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.873161 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.874012 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.874142 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.874679 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.874796 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.875058 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.875513 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.875650 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vgkx8"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.875918 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.876120 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-4z6jl"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.876186 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vgkx8" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.876354 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.876482 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.877069 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.878022 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.878127 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.880703 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.880926 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.883896 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.890305 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.893706 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.893796 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.894037 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.894414 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.894785 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.894958 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.895106 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.895248 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.895362 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.895468 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.894962 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.895840 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-wfsz4"] Dec 13 06:47:50 crc 
kubenswrapper[4644]: I1213 06:47:50.896134 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-dtxqg"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.896368 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-lzb4z"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.896677 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-lzb4z" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.896743 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.896935 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.897026 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.897066 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.897148 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.897262 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.897349 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.897752 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.897887 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.897994 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.898084 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.898408 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-dtxqg" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.898754 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.899648 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-shtgr"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.900196 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-shtgr" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.903659 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.904916 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.904964 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.905036 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.905103 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.905124 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.905211 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.905250 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.905492 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.905640 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.905806 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.907350 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.907624 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.907838 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.907839 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.909495 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2h6ps"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.910111 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2h6ps" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.910365 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-d7jk5"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.910936 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-d7jk5" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.912824 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.909522 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.913158 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.930197 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lknvc"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.930832 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.931746 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.932175 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.932485 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.932768 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.933063 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.933511 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.934816 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.934853 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.935072 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.935431 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.936651 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 
06:47:50.936683 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.939538 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.939837 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.942675 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.942925 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.943242 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.943373 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lknvc" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.943683 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.944134 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.944663 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.944788 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.944898 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.945096 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.945217 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.945131 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.945749 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.945862 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-c69v8"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.946097 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.946281 4644 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xmrpm"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.946608 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.946644 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.946725 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-vfb94"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.946950 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.947017 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xmrpm" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.947188 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.947561 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.947571 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.947748 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.947861 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.947999 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.948170 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.948540 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.948582 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.948644 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.948768 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.952721 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.952862 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.952947 4644 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.953781 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-l4cnx"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.954178 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.954285 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.954720 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-vfb94" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.954929 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.955136 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6kp77"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.955322 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-l4cnx" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.955632 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.956131 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.960065 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pdwp4"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.960305 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.960351 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdd25"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.963591 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.964359 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.960434 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6kp77" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.966113 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-crmqj"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.960426 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pdwp4" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.966527 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9nmb7"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.966710 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdd25" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.966824 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.967324 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.967559 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9nmb7" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.968164 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgprm"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.968533 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mrjxs"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.978676 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.979836 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7732124a-a282-433a-83e7-040dd881ac56-etcd-client\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.984852 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.984981 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-encryption-config\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.985183 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/7732124a-a282-433a-83e7-040dd881ac56-etcd-service-ca\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.985272 4644 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-etcd-client\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.985367 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3e906e3d-d501-43fd-baae-8c5b606c7c58-audit\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.980113 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.986137 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-audit-policies\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.980549 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.986287 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/16dd8123-4c76-4fa6-8792-d1fe4a68b782-bound-sa-token\") pod \"ingress-operator-5b745b69d9-m24dn\" (UID: \"16dd8123-4c76-4fa6-8792-d1fe4a68b782\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.986356 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6c1981b4-cec6-4c20-807a-982199dc5f81-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rws8k\" (UID: \"6c1981b4-cec6-4c20-807a-982199dc5f81\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.986388 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8mmp\" (UniqueName: \"kubernetes.io/projected/a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50-kube-api-access-v8mmp\") pod \"machine-api-operator-5694c8668f-jjpl8\" (UID: \"a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.986431 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.986479 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" 
(UniqueName: \"kubernetes.io/configmap/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-audit-policies\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.986505 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9315f9b2-8068-4cbb-88ad-5f4046b3dbdc-config\") pod \"console-operator-58897d9998-shtgr\" (UID: \"9315f9b2-8068-4cbb-88ad-5f4046b3dbdc\") " pod="openshift-console-operator/console-operator-58897d9998-shtgr" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.986526 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc32d46-8130-4cd4-a3a6-7e2377a67785-config\") pod \"machine-approver-56656f9798-rpj6t\" (UID: \"8fc32d46-8130-4cd4-a3a6-7e2377a67785\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.986549 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fde8db32-8ec8-4f55-b401-2a796bc1d353-service-ca-bundle\") pod \"authentication-operator-69f744f599-6xwbf\" (UID: \"fde8db32-8ec8-4f55-b401-2a796bc1d353\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.986589 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbj84\" (UniqueName: \"kubernetes.io/projected/16dd8123-4c76-4fa6-8792-d1fe4a68b782-kube-api-access-vbj84\") pod \"ingress-operator-5b745b69d9-m24dn\" (UID: \"16dd8123-4c76-4fa6-8792-d1fe4a68b782\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.986977 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a8d9c95e-97f4-46f8-a3b7-016e7c228f88-profile-collector-cert\") pod \"olm-operator-6b444d44fb-k88nl\" (UID: \"a8d9c95e-97f4-46f8-a3b7-016e7c228f88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.980597 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgprm" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.981707 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mrjxs" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.981614 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.982286 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.987017 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e906e3d-d501-43fd-baae-8c5b606c7c58-serving-cert\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.988176 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.988305 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/16dd8123-4c76-4fa6-8792-d1fe4a68b782-trusted-ca\") pod \"ingress-operator-5b745b69d9-m24dn\" (UID: \"16dd8123-4c76-4fa6-8792-d1fe4a68b782\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.988366 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.988513 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/6c1981b4-cec6-4c20-807a-982199dc5f81-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rws8k\" (UID: \"6c1981b4-cec6-4c20-807a-982199dc5f81\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.988550 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-service-ca\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.988578 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-trusted-ca-bundle\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 
06:47:50.988678 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e906e3d-d501-43fd-baae-8c5b606c7c58-config\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.988738 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgqs8\" (UniqueName: \"kubernetes.io/projected/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-kube-api-access-bgqs8\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.988779 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pktkx\" (UniqueName: \"kubernetes.io/projected/901d2740-8e94-4290-8a30-be45a95d5f74-kube-api-access-pktkx\") pod \"service-ca-operator-777779d784-xmrpm\" (UID: \"901d2740-8e94-4290-8a30-be45a95d5f74\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xmrpm" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.988813 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jqgt\" (UniqueName: \"kubernetes.io/projected/f0e9ddf9-d763-42e9-8682-7aa45939292d-kube-api-access-2jqgt\") pod \"openshift-config-operator-7777fb866f-qbqgt\" (UID: \"f0e9ddf9-d763-42e9-8682-7aa45939292d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.988977 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9315f9b2-8068-4cbb-88ad-5f4046b3dbdc-trusted-ca\") pod \"console-operator-58897d9998-shtgr\" (UID: \"9315f9b2-8068-4cbb-88ad-5f4046b3dbdc\") " pod="openshift-console-operator/console-operator-58897d9998-shtgr" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.989041 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x56tq\" (UniqueName: \"kubernetes.io/projected/9315f9b2-8068-4cbb-88ad-5f4046b3dbdc-kube-api-access-x56tq\") pod \"console-operator-58897d9998-shtgr\" (UID: \"9315f9b2-8068-4cbb-88ad-5f4046b3dbdc\") " pod="openshift-console-operator/console-operator-58897d9998-shtgr" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.989720 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-config\") pod \"controller-manager-879f6c89f-mh84f\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.989782 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxsdp\" (UniqueName: \"kubernetes.io/projected/3e62a18e-fc3a-4859-9a12-cd740f5ee8a1-kube-api-access-qxsdp\") pod \"openshift-apiserver-operator-796bbdcf4f-2h6ps\" (UID: \"3e62a18e-fc3a-4859-9a12-cd740f5ee8a1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2h6ps" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.989338 4644 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b"] Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.989570 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.989842 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.989898 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smg29\" (UniqueName: \"kubernetes.io/projected/b9059962-adc6-4278-aead-d07a310b9776-kube-api-access-smg29\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.990015 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70f75bc7-a420-41c8-9b46-2cf4039210bd-config\") pod \"kube-apiserver-operator-766d6c64bb-lknvc\" (UID: \"70f75bc7-a420-41c8-9b46-2cf4039210bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lknvc" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.990053 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-client-ca\") pod \"controller-manager-879f6c89f-mh84f\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.990099 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-serving-cert\") pod \"controller-manager-879f6c89f-mh84f\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.990198 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8fc32d46-8130-4cd4-a3a6-7e2377a67785-auth-proxy-config\") pod \"machine-approver-56656f9798-rpj6t\" (UID: \"8fc32d46-8130-4cd4-a3a6-7e2377a67785\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.990263 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfvmq\" (UniqueName: \"kubernetes.io/projected/8fc32d46-8130-4cd4-a3a6-7e2377a67785-kube-api-access-gfvmq\") pod \"machine-approver-56656f9798-rpj6t\" (UID: \"8fc32d46-8130-4cd4-a3a6-7e2377a67785\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.990317 4644 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-config\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.990386 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3e906e3d-d501-43fd-baae-8c5b606c7c58-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992032 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-serving-cert\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992121 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992197 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3e906e3d-d501-43fd-baae-8c5b606c7c58-etcd-serving-ca\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992234 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mh84f\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992262 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlscj\" (UniqueName: \"kubernetes.io/projected/a8d9c95e-97f4-46f8-a3b7-016e7c228f88-kube-api-access-jlscj\") pod \"olm-operator-6b444d44fb-k88nl\" (UID: \"a8d9c95e-97f4-46f8-a3b7-016e7c228f88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992286 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/7732124a-a282-433a-83e7-040dd881ac56-etcd-ca\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992359 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-oauth-serving-cert\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992389 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9059962-adc6-4278-aead-d07a310b9776-audit-dir\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992419 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6c1981b4-cec6-4c20-807a-982199dc5f81-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rws8k\" (UID: \"6c1981b4-cec6-4c20-807a-982199dc5f81\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992561 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/901d2740-8e94-4290-8a30-be45a95d5f74-config\") pod \"service-ca-operator-777779d784-xmrpm\" (UID: \"901d2740-8e94-4290-8a30-be45a95d5f74\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xmrpm" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992607 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3e906e3d-d501-43fd-baae-8c5b606c7c58-encryption-config\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992664 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vdrk\" (UniqueName: \"kubernetes.io/projected/85a3c5f0-2d32-465d-9c1e-689f35f25507-kube-api-access-4vdrk\") pod \"openshift-controller-manager-operator-756b6f6bc6-lzb4z\" (UID: \"85a3c5f0-2d32-465d-9c1e-689f35f25507\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-lzb4z" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992702 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/70f75bc7-a420-41c8-9b46-2cf4039210bd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-lknvc\" (UID: \"70f75bc7-a420-41c8-9b46-2cf4039210bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lknvc" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992728 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0e9ddf9-d763-42e9-8682-7aa45939292d-serving-cert\") pod \"openshift-config-operator-7777fb866f-qbqgt\" (UID: \"f0e9ddf9-d763-42e9-8682-7aa45939292d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992773 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/901d2740-8e94-4290-8a30-be45a95d5f74-serving-cert\") pod \"service-ca-operator-777779d784-xmrpm\" (UID: \"901d2740-8e94-4290-8a30-be45a95d5f74\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xmrpm" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992824 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9315f9b2-8068-4cbb-88ad-5f4046b3dbdc-serving-cert\") pod \"console-operator-58897d9998-shtgr\" (UID: \"9315f9b2-8068-4cbb-88ad-5f4046b3dbdc\") " pod="openshift-console-operator/console-operator-58897d9998-shtgr" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992877 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tt4m6\" (UniqueName: \"kubernetes.io/projected/622db6f9-2148-4569-88a6-f37650895811-kube-api-access-tt4m6\") pod \"cluster-samples-operator-665b6dd947-vgkx8\" (UID: \"622db6f9-2148-4569-88a6-f37650895811\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vgkx8" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992927 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgtcd\" (UniqueName: \"kubernetes.io/projected/cd4c2102-9555-4eb4-8d7e-91d8c7020a0e-kube-api-access-lgtcd\") pod \"downloads-7954f5f757-dtxqg\" (UID: \"cd4c2102-9555-4eb4-8d7e-91d8c7020a0e\") " pod="openshift-console/downloads-7954f5f757-dtxqg" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.992953 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/622db6f9-2148-4569-88a6-f37650895811-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-vgkx8\" (UID: \"622db6f9-2148-4569-88a6-f37650895811\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vgkx8" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.993032 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.993088 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50-config\") pod \"machine-api-operator-5694c8668f-jjpl8\" (UID: \"a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.993178 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/33355e8a-5a62-49d4-8c71-ab546cdbf141-client-ca\") pod \"route-controller-manager-6576b87f9c-bmz5w\" (UID: \"33355e8a-5a62-49d4-8c71-ab546cdbf141\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.993230 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f0e9ddf9-d763-42e9-8682-7aa45939292d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-qbqgt\" (UID: \"f0e9ddf9-d763-42e9-8682-7aa45939292d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.993356 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e62a18e-fc3a-4859-9a12-cd740f5ee8a1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-2h6ps\" (UID: \"3e62a18e-fc3a-4859-9a12-cd740f5ee8a1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2h6ps" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.993400 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-jjpl8\" (UID: \"a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.993708 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3e906e3d-d501-43fd-baae-8c5b606c7c58-node-pullsecrets\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.993762 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e62a18e-fc3a-4859-9a12-cd740f5ee8a1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-2h6ps\" (UID: \"3e62a18e-fc3a-4859-9a12-cd740f5ee8a1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2h6ps" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.993793 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/16dd8123-4c76-4fa6-8792-d1fe4a68b782-metrics-tls\") pod \"ingress-operator-5b745b69d9-m24dn\" (UID: \"16dd8123-4c76-4fa6-8792-d1fe4a68b782\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.993817 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.993844 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/773a896a-a43c-4a09-a6e7-42063bf34606-metrics-tls\") pod \"dns-operator-744455d44c-d7jk5\" (UID: \"773a896a-a43c-4a09-a6e7-42063bf34606\") " pod="openshift-dns-operator/dns-operator-744455d44c-d7jk5" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.993867 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-t5zjn\" (UniqueName: \"kubernetes.io/projected/6c1981b4-cec6-4c20-807a-982199dc5f81-kube-api-access-t5zjn\") pod \"cluster-image-registry-operator-dc59b4c8b-rws8k\" (UID: \"6c1981b4-cec6-4c20-807a-982199dc5f81\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.993892 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fde8db32-8ec8-4f55-b401-2a796bc1d353-config\") pod \"authentication-operator-69f744f599-6xwbf\" (UID: \"fde8db32-8ec8-4f55-b401-2a796bc1d353\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.993912 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3e906e3d-d501-43fd-baae-8c5b606c7c58-audit-dir\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.993931 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7732124a-a282-433a-83e7-040dd881ac56-serving-cert\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.993952 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csgmx\" (UniqueName: \"kubernetes.io/projected/7732124a-a282-433a-83e7-040dd881ac56-kube-api-access-csgmx\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.993971 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xd8ml\" (UniqueName: \"kubernetes.io/projected/33355e8a-5a62-49d4-8c71-ab546cdbf141-kube-api-access-xd8ml\") pod \"route-controller-manager-6576b87f9c-bmz5w\" (UID: \"33355e8a-5a62-49d4-8c71-ab546cdbf141\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994058 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994103 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/70f75bc7-a420-41c8-9b46-2cf4039210bd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-lknvc\" (UID: \"70f75bc7-a420-41c8-9b46-2cf4039210bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lknvc" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994146 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"kube-api-access-k7v52\" (UniqueName: \"kubernetes.io/projected/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-kube-api-access-k7v52\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994181 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994217 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85a3c5f0-2d32-465d-9c1e-689f35f25507-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-lzb4z\" (UID: \"85a3c5f0-2d32-465d-9c1e-689f35f25507\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-lzb4z" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994246 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7732124a-a282-433a-83e7-040dd881ac56-config\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994263 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3e906e3d-d501-43fd-baae-8c5b606c7c58-image-import-ca\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994309 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85a3c5f0-2d32-465d-9c1e-689f35f25507-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-lzb4z\" (UID: \"85a3c5f0-2d32-465d-9c1e-689f35f25507\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-lzb4z" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994341 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a8d9c95e-97f4-46f8-a3b7-016e7c228f88-srv-cert\") pod \"olm-operator-6b444d44fb-k88nl\" (UID: \"a8d9c95e-97f4-46f8-a3b7-016e7c228f88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994586 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33355e8a-5a62-49d4-8c71-ab546cdbf141-config\") pod \"route-controller-manager-6576b87f9c-bmz5w\" (UID: \"33355e8a-5a62-49d4-8c71-ab546cdbf141\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994649 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-serving-cert\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994680 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994714 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994742 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-audit-dir\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994773 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvhrk\" (UniqueName: \"kubernetes.io/projected/773a896a-a43c-4a09-a6e7-42063bf34606-kube-api-access-qvhrk\") pod \"dns-operator-744455d44c-d7jk5\" (UID: \"773a896a-a43c-4a09-a6e7-42063bf34606\") " pod="openshift-dns-operator/dns-operator-744455d44c-d7jk5" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994829 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fde8db32-8ec8-4f55-b401-2a796bc1d353-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6xwbf\" (UID: \"fde8db32-8ec8-4f55-b401-2a796bc1d353\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994853 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fde8db32-8ec8-4f55-b401-2a796bc1d353-serving-cert\") pod \"authentication-operator-69f744f599-6xwbf\" (UID: \"fde8db32-8ec8-4f55-b401-2a796bc1d353\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994875 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cdrk\" (UniqueName: \"kubernetes.io/projected/fde8db32-8ec8-4f55-b401-2a796bc1d353-kube-api-access-9cdrk\") pod \"authentication-operator-69f744f599-6xwbf\" (UID: \"fde8db32-8ec8-4f55-b401-2a796bc1d353\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994896 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/33355e8a-5a62-49d4-8c71-ab546cdbf141-serving-cert\") pod \"route-controller-manager-6576b87f9c-bmz5w\" (UID: \"33355e8a-5a62-49d4-8c71-ab546cdbf141\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.994937 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.995025 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3e906e3d-d501-43fd-baae-8c5b606c7c58-etcd-client\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.995072 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/8fc32d46-8130-4cd4-a3a6-7e2377a67785-machine-approver-tls\") pod \"machine-approver-56656f9798-rpj6t\" (UID: \"8fc32d46-8130-4cd4-a3a6-7e2377a67785\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.995136 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50-images\") pod \"machine-api-operator-5694c8668f-jjpl8\" (UID: \"a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.995168 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-oauth-config\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.995258 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcg78\" (UniqueName: \"kubernetes.io/projected/3e906e3d-d501-43fd-baae-8c5b606c7c58-kube-api-access-jcg78\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:50 crc kubenswrapper[4644]: I1213 06:47:50.995342 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqmnn\" (UniqueName: \"kubernetes.io/projected/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-kube-api-access-tqmnn\") pod \"controller-manager-879f6c89f-mh84f\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.001605 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 13 06:47:51 crc 
kubenswrapper[4644]: I1213 06:47:51.008807 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2rfck"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.009031 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.009694 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-wqmsp"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.009779 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.010507 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mh84f"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.010599 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6xwbf"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.010671 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8ktwh"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.010735 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-hmhwd"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.010690 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-wqmsp" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.011805 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-jjpl8"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.011894 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-4z6jl"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.011940 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.011953 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.012069 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.012127 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-dtxqg"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.012519 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.013565 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.014484 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-wfsz4"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.015404 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.016262 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vgkx8"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.017230 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-l4cnx"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.017908 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2h6ps"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.018346 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.019121 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-d7jk5"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.020117 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdd25"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.021151 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.022522 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-shtgr"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.023269 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.024100 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xmrpm"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.026262 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-c69v8"] Dec 13 06:47:51 crc kubenswrapper[4644]: 
I1213 06:47:51.027062 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9nmb7"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.027969 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pdwp4"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.029391 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-lzb4z"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.030205 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.030992 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lknvc"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.031852 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-crmqj"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.032912 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-hmhwd"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.033761 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-wqmsp"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.034896 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mrjxs"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.035729 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6kp77"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.036511 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.037854 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.038739 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.038929 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-cvf68"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.040082 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-cvf68" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.040100 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgprm"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.041071 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.042091 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-hn5l6"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.042948 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2rfck"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.043047 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-hn5l6" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.044063 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-hn5l6"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.058502 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.080116 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.082746 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-h4qgz"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.086334 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-h4qgz" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.088090 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-h4qgz"] Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096265 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d8d1851-76f3-46ad-8637-738acb08ea9b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pdwp4\" (UID: \"9d8d1851-76f3-46ad-8637-738acb08ea9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pdwp4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096333 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/7732124a-a282-433a-83e7-040dd881ac56-etcd-service-ca\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096364 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6c1981b4-cec6-4c20-807a-982199dc5f81-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rws8k\" (UID: \"6c1981b4-cec6-4c20-807a-982199dc5f81\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096383 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-registration-dir\") pod \"csi-hostpathplugin-hmhwd\" (UID: \"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096406 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a8d9c95e-97f4-46f8-a3b7-016e7c228f88-profile-collector-cert\") pod \"olm-operator-6b444d44fb-k88nl\" (UID: \"a8d9c95e-97f4-46f8-a3b7-016e7c228f88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096425 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e906e3d-d501-43fd-baae-8c5b606c7c58-serving-cert\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096465 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096485 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/16dd8123-4c76-4fa6-8792-d1fe4a68b782-trusted-ca\") pod \"ingress-operator-5b745b69d9-m24dn\" (UID: \"16dd8123-4c76-4fa6-8792-d1fe4a68b782\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096505 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/6c1981b4-cec6-4c20-807a-982199dc5f81-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rws8k\" (UID: \"6c1981b4-cec6-4c20-807a-982199dc5f81\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096528 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9d8d1851-76f3-46ad-8637-738acb08ea9b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pdwp4\" (UID: \"9d8d1851-76f3-46ad-8637-738acb08ea9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pdwp4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096550 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-trusted-ca-bundle\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096573 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096592 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e906e3d-d501-43fd-baae-8c5b606c7c58-config\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096610 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgqs8\" (UniqueName: \"kubernetes.io/projected/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-kube-api-access-bgqs8\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096630 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pktkx\" (UniqueName: \"kubernetes.io/projected/901d2740-8e94-4290-8a30-be45a95d5f74-kube-api-access-pktkx\") pod \"service-ca-operator-777779d784-xmrpm\" (UID: \"901d2740-8e94-4290-8a30-be45a95d5f74\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xmrpm" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096649 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jqgt\" (UniqueName: \"kubernetes.io/projected/f0e9ddf9-d763-42e9-8682-7aa45939292d-kube-api-access-2jqgt\") pod \"openshift-config-operator-7777fb866f-qbqgt\" (UID: \"f0e9ddf9-d763-42e9-8682-7aa45939292d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt" Dec 13 06:47:51 crc 
kubenswrapper[4644]: I1213 06:47:51.096666 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9315f9b2-8068-4cbb-88ad-5f4046b3dbdc-trusted-ca\") pod \"console-operator-58897d9998-shtgr\" (UID: \"9315f9b2-8068-4cbb-88ad-5f4046b3dbdc\") " pod="openshift-console-operator/console-operator-58897d9998-shtgr" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096683 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x56tq\" (UniqueName: \"kubernetes.io/projected/9315f9b2-8068-4cbb-88ad-5f4046b3dbdc-kube-api-access-x56tq\") pod \"console-operator-58897d9998-shtgr\" (UID: \"9315f9b2-8068-4cbb-88ad-5f4046b3dbdc\") " pod="openshift-console-operator/console-operator-58897d9998-shtgr" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096702 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxsdp\" (UniqueName: \"kubernetes.io/projected/3e62a18e-fc3a-4859-9a12-cd740f5ee8a1-kube-api-access-qxsdp\") pod \"openshift-apiserver-operator-796bbdcf4f-2h6ps\" (UID: \"3e62a18e-fc3a-4859-9a12-cd740f5ee8a1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2h6ps" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096730 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096746 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70f75bc7-a420-41c8-9b46-2cf4039210bd-config\") pod \"kube-apiserver-operator-766d6c64bb-lknvc\" (UID: \"70f75bc7-a420-41c8-9b46-2cf4039210bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lknvc" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096761 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-client-ca\") pod \"controller-manager-879f6c89f-mh84f\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096778 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8fc32d46-8130-4cd4-a3a6-7e2377a67785-auth-proxy-config\") pod \"machine-approver-56656f9798-rpj6t\" (UID: \"8fc32d46-8130-4cd4-a3a6-7e2377a67785\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096795 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfvmq\" (UniqueName: \"kubernetes.io/projected/8fc32d46-8130-4cd4-a3a6-7e2377a67785-kube-api-access-gfvmq\") pod \"machine-approver-56656f9798-rpj6t\" (UID: \"8fc32d46-8130-4cd4-a3a6-7e2377a67785\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096811 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-config\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096831 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096848 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3e906e3d-d501-43fd-baae-8c5b606c7c58-etcd-serving-ca\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096866 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8792f\" (UniqueName: \"kubernetes.io/projected/29418836-a9b6-42c2-90b1-755ff73fe3fa-kube-api-access-8792f\") pod \"control-plane-machine-set-operator-78cbb6b69f-mrjxs\" (UID: \"29418836-a9b6-42c2-90b1-755ff73fe3fa\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mrjxs" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096886 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/7732124a-a282-433a-83e7-040dd881ac56-etcd-ca\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096901 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9059962-adc6-4278-aead-d07a310b9776-audit-dir\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096916 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3f1dbe36-7205-4fac-9ce9-0194d29a6bba-default-certificate\") pod \"router-default-5444994796-vfb94\" (UID: \"3f1dbe36-7205-4fac-9ce9-0194d29a6bba\") " pod="openshift-ingress/router-default-5444994796-vfb94" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096932 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3f1dbe36-7205-4fac-9ce9-0194d29a6bba-service-ca-bundle\") pod \"router-default-5444994796-vfb94\" (UID: \"3f1dbe36-7205-4fac-9ce9-0194d29a6bba\") " pod="openshift-ingress/router-default-5444994796-vfb94" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096951 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3e906e3d-d501-43fd-baae-8c5b606c7c58-encryption-config\") pod \"apiserver-76f77b778f-8ktwh\" (UID: 
\"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096968 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/70f75bc7-a420-41c8-9b46-2cf4039210bd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-lknvc\" (UID: \"70f75bc7-a420-41c8-9b46-2cf4039210bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lknvc" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.096983 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-socket-dir\") pod \"csi-hostpathplugin-hmhwd\" (UID: \"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097002 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0e9ddf9-d763-42e9-8682-7aa45939292d-serving-cert\") pod \"openshift-config-operator-7777fb866f-qbqgt\" (UID: \"f0e9ddf9-d763-42e9-8682-7aa45939292d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097022 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9315f9b2-8068-4cbb-88ad-5f4046b3dbdc-serving-cert\") pod \"console-operator-58897d9998-shtgr\" (UID: \"9315f9b2-8068-4cbb-88ad-5f4046b3dbdc\") " pod="openshift-console-operator/console-operator-58897d9998-shtgr" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097039 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tt4m6\" (UniqueName: \"kubernetes.io/projected/622db6f9-2148-4569-88a6-f37650895811-kube-api-access-tt4m6\") pod \"cluster-samples-operator-665b6dd947-vgkx8\" (UID: \"622db6f9-2148-4569-88a6-f37650895811\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vgkx8" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097058 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgtcd\" (UniqueName: \"kubernetes.io/projected/cd4c2102-9555-4eb4-8d7e-91d8c7020a0e-kube-api-access-lgtcd\") pod \"downloads-7954f5f757-dtxqg\" (UID: \"cd4c2102-9555-4eb4-8d7e-91d8c7020a0e\") " pod="openshift-console/downloads-7954f5f757-dtxqg" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097073 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/622db6f9-2148-4569-88a6-f37650895811-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-vgkx8\" (UID: \"622db6f9-2148-4569-88a6-f37650895811\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vgkx8" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097093 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 
06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097116 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/33355e8a-5a62-49d4-8c71-ab546cdbf141-client-ca\") pod \"route-controller-manager-6576b87f9c-bmz5w\" (UID: \"33355e8a-5a62-49d4-8c71-ab546cdbf141\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097134 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f0e9ddf9-d763-42e9-8682-7aa45939292d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-qbqgt\" (UID: \"f0e9ddf9-d763-42e9-8682-7aa45939292d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097166 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097183 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/773a896a-a43c-4a09-a6e7-42063bf34606-metrics-tls\") pod \"dns-operator-744455d44c-d7jk5\" (UID: \"773a896a-a43c-4a09-a6e7-42063bf34606\") " pod="openshift-dns-operator/dns-operator-744455d44c-d7jk5" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097200 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fde8db32-8ec8-4f55-b401-2a796bc1d353-config\") pod \"authentication-operator-69f744f599-6xwbf\" (UID: \"fde8db32-8ec8-4f55-b401-2a796bc1d353\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097217 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3e906e3d-d501-43fd-baae-8c5b606c7c58-audit-dir\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097234 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7732124a-a282-433a-83e7-040dd881ac56-serving-cert\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097251 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csgmx\" (UniqueName: \"kubernetes.io/projected/7732124a-a282-433a-83e7-040dd881ac56-kube-api-access-csgmx\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097269 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097286 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/70f75bc7-a420-41c8-9b46-2cf4039210bd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-lknvc\" (UID: \"70f75bc7-a420-41c8-9b46-2cf4039210bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lknvc" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097318 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7732124a-a282-433a-83e7-040dd881ac56-config\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097333 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3e906e3d-d501-43fd-baae-8c5b606c7c58-image-import-ca\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097353 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85a3c5f0-2d32-465d-9c1e-689f35f25507-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-lzb4z\" (UID: \"85a3c5f0-2d32-465d-9c1e-689f35f25507\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-lzb4z" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097371 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3f1dbe36-7205-4fac-9ce9-0194d29a6bba-metrics-certs\") pod \"router-default-5444994796-vfb94\" (UID: \"3f1dbe36-7205-4fac-9ce9-0194d29a6bba\") " pod="openshift-ingress/router-default-5444994796-vfb94" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097392 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33355e8a-5a62-49d4-8c71-ab546cdbf141-config\") pod \"route-controller-manager-6576b87f9c-bmz5w\" (UID: \"33355e8a-5a62-49d4-8c71-ab546cdbf141\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097411 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097428 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fde8db32-8ec8-4f55-b401-2a796bc1d353-serving-cert\") pod \"authentication-operator-69f744f599-6xwbf\" (UID: 
\"fde8db32-8ec8-4f55-b401-2a796bc1d353\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097461 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/8fc32d46-8130-4cd4-a3a6-7e2377a67785-machine-approver-tls\") pod \"machine-approver-56656f9798-rpj6t\" (UID: \"8fc32d46-8130-4cd4-a3a6-7e2377a67785\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097485 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3f1dbe36-7205-4fac-9ce9-0194d29a6bba-stats-auth\") pod \"router-default-5444994796-vfb94\" (UID: \"3f1dbe36-7205-4fac-9ce9-0194d29a6bba\") " pod="openshift-ingress/router-default-5444994796-vfb94" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097503 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50-images\") pod \"machine-api-operator-5694c8668f-jjpl8\" (UID: \"a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097520 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqzvh\" (UniqueName: \"kubernetes.io/projected/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-kube-api-access-hqzvh\") pod \"csi-hostpathplugin-hmhwd\" (UID: \"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097540 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcg78\" (UniqueName: \"kubernetes.io/projected/3e906e3d-d501-43fd-baae-8c5b606c7c58-kube-api-access-jcg78\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097577 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6c1981b4-cec6-4c20-807a-982199dc5f81-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rws8k\" (UID: \"6c1981b4-cec6-4c20-807a-982199dc5f81\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097589 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7732124a-a282-433a-83e7-040dd881ac56-etcd-client\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097612 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097631 4644 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-encryption-config\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097651 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-etcd-client\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097668 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-plugins-dir\") pod \"csi-hostpathplugin-hmhwd\" (UID: \"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097686 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d8d1851-76f3-46ad-8637-738acb08ea9b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pdwp4\" (UID: \"9d8d1851-76f3-46ad-8637-738acb08ea9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pdwp4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097703 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3e906e3d-d501-43fd-baae-8c5b606c7c58-audit\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097719 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-audit-policies\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097735 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/16dd8123-4c76-4fa6-8792-d1fe4a68b782-bound-sa-token\") pod \"ingress-operator-5b745b69d9-m24dn\" (UID: \"16dd8123-4c76-4fa6-8792-d1fe4a68b782\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097752 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8mmp\" (UniqueName: \"kubernetes.io/projected/a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50-kube-api-access-v8mmp\") pod \"machine-api-operator-5694c8668f-jjpl8\" (UID: \"a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097771 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-error\") pod 
\"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097786 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-audit-policies\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097806 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9315f9b2-8068-4cbb-88ad-5f4046b3dbdc-config\") pod \"console-operator-58897d9998-shtgr\" (UID: \"9315f9b2-8068-4cbb-88ad-5f4046b3dbdc\") " pod="openshift-console-operator/console-operator-58897d9998-shtgr" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097821 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc32d46-8130-4cd4-a3a6-7e2377a67785-config\") pod \"machine-approver-56656f9798-rpj6t\" (UID: \"8fc32d46-8130-4cd4-a3a6-7e2377a67785\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097839 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fde8db32-8ec8-4f55-b401-2a796bc1d353-service-ca-bundle\") pod \"authentication-operator-69f744f599-6xwbf\" (UID: \"fde8db32-8ec8-4f55-b401-2a796bc1d353\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097855 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbj84\" (UniqueName: \"kubernetes.io/projected/16dd8123-4c76-4fa6-8792-d1fe4a68b782-kube-api-access-vbj84\") pod \"ingress-operator-5b745b69d9-m24dn\" (UID: \"16dd8123-4c76-4fa6-8792-d1fe4a68b782\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097872 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-service-ca\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097893 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8whg\" (UniqueName: \"kubernetes.io/projected/3f1dbe36-7205-4fac-9ce9-0194d29a6bba-kube-api-access-j8whg\") pod \"router-default-5444994796-vfb94\" (UID: \"3f1dbe36-7205-4fac-9ce9-0194d29a6bba\") " pod="openshift-ingress/router-default-5444994796-vfb94" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097916 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-config\") pod \"controller-manager-879f6c89f-mh84f\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097935 4644 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-smg29\" (UniqueName: \"kubernetes.io/projected/b9059962-adc6-4278-aead-d07a310b9776-kube-api-access-smg29\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097957 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-serving-cert\") pod \"controller-manager-879f6c89f-mh84f\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097979 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3e906e3d-d501-43fd-baae-8c5b606c7c58-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.097997 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-serving-cert\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.098023 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mh84f\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.098041 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlscj\" (UniqueName: \"kubernetes.io/projected/a8d9c95e-97f4-46f8-a3b7-016e7c228f88-kube-api-access-jlscj\") pod \"olm-operator-6b444d44fb-k88nl\" (UID: \"a8d9c95e-97f4-46f8-a3b7-016e7c228f88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.098059 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-oauth-serving-cert\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.098076 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6c1981b4-cec6-4c20-807a-982199dc5f81-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rws8k\" (UID: \"6c1981b4-cec6-4c20-807a-982199dc5f81\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.098094 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/901d2740-8e94-4290-8a30-be45a95d5f74-config\") pod \"service-ca-operator-777779d784-xmrpm\" (UID: 
\"901d2740-8e94-4290-8a30-be45a95d5f74\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xmrpm" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.098114 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vdrk\" (UniqueName: \"kubernetes.io/projected/85a3c5f0-2d32-465d-9c1e-689f35f25507-kube-api-access-4vdrk\") pod \"openshift-controller-manager-operator-756b6f6bc6-lzb4z\" (UID: \"85a3c5f0-2d32-465d-9c1e-689f35f25507\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-lzb4z" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.098133 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/901d2740-8e94-4290-8a30-be45a95d5f74-serving-cert\") pod \"service-ca-operator-777779d784-xmrpm\" (UID: \"901d2740-8e94-4290-8a30-be45a95d5f74\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xmrpm" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.098159 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50-config\") pod \"machine-api-operator-5694c8668f-jjpl8\" (UID: \"a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.098178 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e62a18e-fc3a-4859-9a12-cd740f5ee8a1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-2h6ps\" (UID: \"3e62a18e-fc3a-4859-9a12-cd740f5ee8a1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2h6ps" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.098196 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-jjpl8\" (UID: \"a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.098214 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3e906e3d-d501-43fd-baae-8c5b606c7c58-node-pullsecrets\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.098230 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e62a18e-fc3a-4859-9a12-cd740f5ee8a1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-2h6ps\" (UID: \"3e62a18e-fc3a-4859-9a12-cd740f5ee8a1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2h6ps" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.098851 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/16dd8123-4c76-4fa6-8792-d1fe4a68b782-metrics-tls\") pod \"ingress-operator-5b745b69d9-m24dn\" (UID: \"16dd8123-4c76-4fa6-8792-d1fe4a68b782\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" Dec 13 06:47:51 crc 
kubenswrapper[4644]: I1213 06:47:51.098880 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-csi-data-dir\") pod \"csi-hostpathplugin-hmhwd\" (UID: \"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.098908 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5zjn\" (UniqueName: \"kubernetes.io/projected/6c1981b4-cec6-4c20-807a-982199dc5f81-kube-api-access-t5zjn\") pod \"cluster-image-registry-operator-dc59b4c8b-rws8k\" (UID: \"6c1981b4-cec6-4c20-807a-982199dc5f81\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.098934 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xd8ml\" (UniqueName: \"kubernetes.io/projected/33355e8a-5a62-49d4-8c71-ab546cdbf141-kube-api-access-xd8ml\") pod \"route-controller-manager-6576b87f9c-bmz5w\" (UID: \"33355e8a-5a62-49d4-8c71-ab546cdbf141\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099004 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099024 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85a3c5f0-2d32-465d-9c1e-689f35f25507-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-lzb4z\" (UID: \"85a3c5f0-2d32-465d-9c1e-689f35f25507\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-lzb4z" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099043 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/29418836-a9b6-42c2-90b1-755ff73fe3fa-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-mrjxs\" (UID: \"29418836-a9b6-42c2-90b1-755ff73fe3fa\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mrjxs" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099064 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-mountpoint-dir\") pod \"csi-hostpathplugin-hmhwd\" (UID: \"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099083 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7v52\" (UniqueName: \"kubernetes.io/projected/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-kube-api-access-k7v52\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:51 crc kubenswrapper[4644]: 
I1213 06:47:51.099107 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a8d9c95e-97f4-46f8-a3b7-016e7c228f88-srv-cert\") pod \"olm-operator-6b444d44fb-k88nl\" (UID: \"a8d9c95e-97f4-46f8-a3b7-016e7c228f88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099125 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-serving-cert\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099140 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099156 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-audit-dir\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099178 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvhrk\" (UniqueName: \"kubernetes.io/projected/773a896a-a43c-4a09-a6e7-42063bf34606-kube-api-access-qvhrk\") pod \"dns-operator-744455d44c-d7jk5\" (UID: \"773a896a-a43c-4a09-a6e7-42063bf34606\") " pod="openshift-dns-operator/dns-operator-744455d44c-d7jk5" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099199 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fde8db32-8ec8-4f55-b401-2a796bc1d353-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6xwbf\" (UID: \"fde8db32-8ec8-4f55-b401-2a796bc1d353\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099217 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cdrk\" (UniqueName: \"kubernetes.io/projected/fde8db32-8ec8-4f55-b401-2a796bc1d353-kube-api-access-9cdrk\") pod \"authentication-operator-69f744f599-6xwbf\" (UID: \"fde8db32-8ec8-4f55-b401-2a796bc1d353\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099234 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33355e8a-5a62-49d4-8c71-ab546cdbf141-serving-cert\") pod \"route-controller-manager-6576b87f9c-bmz5w\" (UID: \"33355e8a-5a62-49d4-8c71-ab546cdbf141\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099254 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099283 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3e906e3d-d501-43fd-baae-8c5b606c7c58-etcd-client\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099315 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-oauth-config\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099333 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqmnn\" (UniqueName: \"kubernetes.io/projected/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-kube-api-access-tqmnn\") pod \"controller-manager-879f6c89f-mh84f\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099430 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70f75bc7-a420-41c8-9b46-2cf4039210bd-config\") pod \"kube-apiserver-operator-766d6c64bb-lknvc\" (UID: \"70f75bc7-a420-41c8-9b46-2cf4039210bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lknvc" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099492 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-trusted-ca-bundle\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.099566 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9315f9b2-8068-4cbb-88ad-5f4046b3dbdc-trusted-ca\") pod \"console-operator-58897d9998-shtgr\" (UID: \"9315f9b2-8068-4cbb-88ad-5f4046b3dbdc\") " pod="openshift-console-operator/console-operator-58897d9998-shtgr" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.100146 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3e906e3d-d501-43fd-baae-8c5b606c7c58-audit\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.100192 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-audit-policies\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.100366 4644 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e906e3d-d501-43fd-baae-8c5b606c7c58-config\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.100570 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/16dd8123-4c76-4fa6-8792-d1fe4a68b782-trusted-ca\") pod \"ingress-operator-5b745b69d9-m24dn\" (UID: \"16dd8123-4c76-4fa6-8792-d1fe4a68b782\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.101032 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9059962-adc6-4278-aead-d07a310b9776-audit-dir\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.101188 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8fc32d46-8130-4cd4-a3a6-7e2377a67785-auth-proxy-config\") pod \"machine-approver-56656f9798-rpj6t\" (UID: \"8fc32d46-8130-4cd4-a3a6-7e2377a67785\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.101214 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e62a18e-fc3a-4859-9a12-cd740f5ee8a1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-2h6ps\" (UID: \"3e62a18e-fc3a-4859-9a12-cd740f5ee8a1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2h6ps" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.098850 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-client-ca\") pod \"controller-manager-879f6c89f-mh84f\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.102022 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3e906e3d-d501-43fd-baae-8c5b606c7c58-etcd-serving-ca\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.102180 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.102267 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.102714 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3e906e3d-d501-43fd-baae-8c5b606c7c58-node-pullsecrets\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.103395 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.103688 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.103842 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/6c1981b4-cec6-4c20-807a-982199dc5f81-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rws8k\" (UID: \"6c1981b4-cec6-4c20-807a-982199dc5f81\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.103870 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.104083 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fde8db32-8ec8-4f55-b401-2a796bc1d353-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6xwbf\" (UID: \"fde8db32-8ec8-4f55-b401-2a796bc1d353\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.104112 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fde8db32-8ec8-4f55-b401-2a796bc1d353-config\") pod \"authentication-operator-69f744f599-6xwbf\" (UID: \"fde8db32-8ec8-4f55-b401-2a796bc1d353\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.104161 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9315f9b2-8068-4cbb-88ad-5f4046b3dbdc-serving-cert\") pod \"console-operator-58897d9998-shtgr\" (UID: \"9315f9b2-8068-4cbb-88ad-5f4046b3dbdc\") " pod="openshift-console-operator/console-operator-58897d9998-shtgr" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.104766 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mh84f\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.105269 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50-images\") pod \"machine-api-operator-5694c8668f-jjpl8\" (UID: \"a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.105410 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc32d46-8130-4cd4-a3a6-7e2377a67785-config\") pod \"machine-approver-56656f9798-rpj6t\" (UID: \"8fc32d46-8130-4cd4-a3a6-7e2377a67785\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.105431 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3e906e3d-d501-43fd-baae-8c5b606c7c58-audit-dir\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.105492 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.105655 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9315f9b2-8068-4cbb-88ad-5f4046b3dbdc-config\") pod \"console-operator-58897d9998-shtgr\" (UID: \"9315f9b2-8068-4cbb-88ad-5f4046b3dbdc\") " pod="openshift-console-operator/console-operator-58897d9998-shtgr" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.105915 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fde8db32-8ec8-4f55-b401-2a796bc1d353-service-ca-bundle\") pod \"authentication-operator-69f744f599-6xwbf\" (UID: \"fde8db32-8ec8-4f55-b401-2a796bc1d353\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.106096 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.106607 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85a3c5f0-2d32-465d-9c1e-689f35f25507-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-lzb4z\" (UID: \"85a3c5f0-2d32-465d-9c1e-689f35f25507\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-lzb4z" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.106606 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"service-ca\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-service-ca\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.106643 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.106637 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50-config\") pod \"machine-api-operator-5694c8668f-jjpl8\" (UID: \"a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.106682 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3e906e3d-d501-43fd-baae-8c5b606c7c58-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.107015 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0e9ddf9-d763-42e9-8682-7aa45939292d-serving-cert\") pod \"openshift-config-operator-7777fb866f-qbqgt\" (UID: \"f0e9ddf9-d763-42e9-8682-7aa45939292d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.107237 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.107346 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/f0e9ddf9-d763-42e9-8682-7aa45939292d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-qbqgt\" (UID: \"f0e9ddf9-d763-42e9-8682-7aa45939292d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.107361 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-config\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.107895 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3e906e3d-d501-43fd-baae-8c5b606c7c58-etcd-client\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 
06:47:51.108159 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/33355e8a-5a62-49d4-8c71-ab546cdbf141-client-ca\") pod \"route-controller-manager-6576b87f9c-bmz5w\" (UID: \"33355e8a-5a62-49d4-8c71-ab546cdbf141\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.108206 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-audit-dir\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.108339 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-oauth-serving-cert\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.108793 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-jjpl8\" (UID: \"a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.108989 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.109110 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-config\") pod \"controller-manager-879f6c89f-mh84f\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.109610 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3e906e3d-d501-43fd-baae-8c5b606c7c58-image-import-ca\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.109742 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fde8db32-8ec8-4f55-b401-2a796bc1d353-serving-cert\") pod \"authentication-operator-69f744f599-6xwbf\" (UID: \"fde8db32-8ec8-4f55-b401-2a796bc1d353\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.109777 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e906e3d-d501-43fd-baae-8c5b606c7c58-serving-cert\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " 
pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.109926 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33355e8a-5a62-49d4-8c71-ab546cdbf141-config\") pod \"route-controller-manager-6576b87f9c-bmz5w\" (UID: \"33355e8a-5a62-49d4-8c71-ab546cdbf141\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.110134 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e62a18e-fc3a-4859-9a12-cd740f5ee8a1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-2h6ps\" (UID: \"3e62a18e-fc3a-4859-9a12-cd740f5ee8a1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2h6ps" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.110403 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-serving-cert\") pod \"controller-manager-879f6c89f-mh84f\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.110508 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/8fc32d46-8130-4cd4-a3a6-7e2377a67785-machine-approver-tls\") pod \"machine-approver-56656f9798-rpj6t\" (UID: \"8fc32d46-8130-4cd4-a3a6-7e2377a67785\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.110520 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/773a896a-a43c-4a09-a6e7-42063bf34606-metrics-tls\") pod \"dns-operator-744455d44c-d7jk5\" (UID: \"773a896a-a43c-4a09-a6e7-42063bf34606\") " pod="openshift-dns-operator/dns-operator-744455d44c-d7jk5" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.110875 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.111169 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85a3c5f0-2d32-465d-9c1e-689f35f25507-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-lzb4z\" (UID: \"85a3c5f0-2d32-465d-9c1e-689f35f25507\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-lzb4z" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.111588 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-serving-cert\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.111607 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.111784 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/622db6f9-2148-4569-88a6-f37650895811-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-vgkx8\" (UID: \"622db6f9-2148-4569-88a6-f37650895811\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vgkx8" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.111914 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/70f75bc7-a420-41c8-9b46-2cf4039210bd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-lknvc\" (UID: \"70f75bc7-a420-41c8-9b46-2cf4039210bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lknvc" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.112026 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-oauth-config\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.112227 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a8d9c95e-97f4-46f8-a3b7-016e7c228f88-profile-collector-cert\") pod \"olm-operator-6b444d44fb-k88nl\" (UID: \"a8d9c95e-97f4-46f8-a3b7-016e7c228f88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.112252 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3e906e3d-d501-43fd-baae-8c5b606c7c58-encryption-config\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.112755 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/16dd8123-4c76-4fa6-8792-d1fe4a68b782-metrics-tls\") pod \"ingress-operator-5b745b69d9-m24dn\" (UID: \"16dd8123-4c76-4fa6-8792-d1fe4a68b782\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.113112 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33355e8a-5a62-49d4-8c71-ab546cdbf141-serving-cert\") pod \"route-controller-manager-6576b87f9c-bmz5w\" (UID: \"33355e8a-5a62-49d4-8c71-ab546cdbf141\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.120782 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.128208 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/a8d9c95e-97f4-46f8-a3b7-016e7c228f88-srv-cert\") pod \"olm-operator-6b444d44fb-k88nl\" (UID: \"a8d9c95e-97f4-46f8-a3b7-016e7c228f88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.140095 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.159255 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.179259 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.184764 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-audit-policies\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.198950 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.200539 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqzvh\" (UniqueName: \"kubernetes.io/projected/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-kube-api-access-hqzvh\") pod \"csi-hostpathplugin-hmhwd\" (UID: \"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.200570 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3f1dbe36-7205-4fac-9ce9-0194d29a6bba-stats-auth\") pod \"router-default-5444994796-vfb94\" (UID: \"3f1dbe36-7205-4fac-9ce9-0194d29a6bba\") " pod="openshift-ingress/router-default-5444994796-vfb94" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.200632 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-plugins-dir\") pod \"csi-hostpathplugin-hmhwd\" (UID: \"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.200651 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d8d1851-76f3-46ad-8637-738acb08ea9b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pdwp4\" (UID: \"9d8d1851-76f3-46ad-8637-738acb08ea9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pdwp4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.200694 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8whg\" (UniqueName: \"kubernetes.io/projected/3f1dbe36-7205-4fac-9ce9-0194d29a6bba-kube-api-access-j8whg\") pod \"router-default-5444994796-vfb94\" (UID: \"3f1dbe36-7205-4fac-9ce9-0194d29a6bba\") " pod="openshift-ingress/router-default-5444994796-vfb94" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.200780 4644 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-csi-data-dir\") pod \"csi-hostpathplugin-hmhwd\" (UID: \"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.200823 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/29418836-a9b6-42c2-90b1-755ff73fe3fa-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-mrjxs\" (UID: \"29418836-a9b6-42c2-90b1-755ff73fe3fa\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mrjxs" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.200842 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-mountpoint-dir\") pod \"csi-hostpathplugin-hmhwd\" (UID: \"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.200902 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d8d1851-76f3-46ad-8637-738acb08ea9b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pdwp4\" (UID: \"9d8d1851-76f3-46ad-8637-738acb08ea9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pdwp4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.200930 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-registration-dir\") pod \"csi-hostpathplugin-hmhwd\" (UID: \"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.200953 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9d8d1851-76f3-46ad-8637-738acb08ea9b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pdwp4\" (UID: \"9d8d1851-76f3-46ad-8637-738acb08ea9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pdwp4" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.200969 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-plugins-dir\") pod \"csi-hostpathplugin-hmhwd\" (UID: \"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.200978 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-csi-data-dir\") pod \"csi-hostpathplugin-hmhwd\" (UID: \"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.201043 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-mountpoint-dir\") pod \"csi-hostpathplugin-hmhwd\" (UID: 
\"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.201057 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-registration-dir\") pod \"csi-hostpathplugin-hmhwd\" (UID: \"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.201128 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8792f\" (UniqueName: \"kubernetes.io/projected/29418836-a9b6-42c2-90b1-755ff73fe3fa-kube-api-access-8792f\") pod \"control-plane-machine-set-operator-78cbb6b69f-mrjxs\" (UID: \"29418836-a9b6-42c2-90b1-755ff73fe3fa\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mrjxs" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.201167 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3f1dbe36-7205-4fac-9ce9-0194d29a6bba-default-certificate\") pod \"router-default-5444994796-vfb94\" (UID: \"3f1dbe36-7205-4fac-9ce9-0194d29a6bba\") " pod="openshift-ingress/router-default-5444994796-vfb94" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.201193 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3f1dbe36-7205-4fac-9ce9-0194d29a6bba-service-ca-bundle\") pod \"router-default-5444994796-vfb94\" (UID: \"3f1dbe36-7205-4fac-9ce9-0194d29a6bba\") " pod="openshift-ingress/router-default-5444994796-vfb94" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.201214 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-socket-dir\") pod \"csi-hostpathplugin-hmhwd\" (UID: \"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.201287 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3f1dbe36-7205-4fac-9ce9-0194d29a6bba-metrics-certs\") pod \"router-default-5444994796-vfb94\" (UID: \"3f1dbe36-7205-4fac-9ce9-0194d29a6bba\") " pod="openshift-ingress/router-default-5444994796-vfb94" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.201358 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-socket-dir\") pod \"csi-hostpathplugin-hmhwd\" (UID: \"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.218771 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.221568 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-etcd-client\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.239003 4644 
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.239003 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.258960 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.266982 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-encryption-config\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.279013 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.288285 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.299780 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.319675 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.329379 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/901d2740-8e94-4290-8a30-be45a95d5f74-serving-cert\") pod \"service-ca-operator-777779d784-xmrpm\" (UID: \"901d2740-8e94-4290-8a30-be45a95d5f74\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xmrpm"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.339304 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.348063 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.359502 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.379541 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.387229 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-serving-cert\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: \"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.401564 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.412123 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/7732124a-a282-433a-83e7-040dd881ac56-etcd-ca\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.419667 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.439392 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.446068 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7732124a-a282-433a-83e7-040dd881ac56-etcd-client\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.459499 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.478665 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.488694 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7732124a-a282-433a-83e7-040dd881ac56-serving-cert\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.499429 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.509006 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/7732124a-a282-433a-83e7-040dd881ac56-etcd-service-ca\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.518489 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.525561 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7732124a-a282-433a-83e7-040dd881ac56-config\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.539470 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.547645 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/901d2740-8e94-4290-8a30-be45a95d5f74-config\") pod \"service-ca-operator-777779d784-xmrpm\" (UID: \"901d2740-8e94-4290-8a30-be45a95d5f74\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xmrpm"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.559057 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.579413 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.598559 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.639628 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.641982 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3f1dbe36-7205-4fac-9ce9-0194d29a6bba-service-ca-bundle\") pod \"router-default-5444994796-vfb94\" (UID: \"3f1dbe36-7205-4fac-9ce9-0194d29a6bba\") " pod="openshift-ingress/router-default-5444994796-vfb94"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.659325 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.663987 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/3f1dbe36-7205-4fac-9ce9-0194d29a6bba-default-certificate\") pod \"router-default-5444994796-vfb94\" (UID: \"3f1dbe36-7205-4fac-9ce9-0194d29a6bba\") " pod="openshift-ingress/router-default-5444994796-vfb94"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.678611 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.683857 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/3f1dbe36-7205-4fac-9ce9-0194d29a6bba-stats-auth\") pod \"router-default-5444994796-vfb94\" (UID: \"3f1dbe36-7205-4fac-9ce9-0194d29a6bba\") " pod="openshift-ingress/router-default-5444994796-vfb94"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.699178 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.704460 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3f1dbe36-7205-4fac-9ce9-0194d29a6bba-metrics-certs\") pod \"router-default-5444994796-vfb94\" (UID: \"3f1dbe36-7205-4fac-9ce9-0194d29a6bba\") " pod="openshift-ingress/router-default-5444994796-vfb94"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.718817 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.739257 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.759088 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.779056 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.799133 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.819895 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.839280 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.859464 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.878713 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.900154 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.919216 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.939725 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.957714 4644 request.go:700] Waited for 1.001337268s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmachine-config-controller-dockercfg-c2lfx&limit=500&resourceVersion=0
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.959154 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.978923 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 13 06:47:51 crc kubenswrapper[4644]: I1213 06:47:51.998380 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.019055 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.038308 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.043828 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d8d1851-76f3-46ad-8637-738acb08ea9b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pdwp4\" (UID: \"9d8d1851-76f3-46ad-8637-738acb08ea9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pdwp4"
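
The request.go:700 record above ("Waited for 1.001337268s due to client-side throttling, not priority and fairness") comes from client-go's local token-bucket rate limiter, which is distinct from the API server's Priority and Fairness. The limiter is configured through the QPS and Burst fields of rest.Config; a sketch with hypothetical values:

    // Sketch of where client-side throttling comes from: the token-bucket
    // limiter attached to a client-go rest.Config. The values below are
    // hypothetical, not the kubelet's actual settings.
    package example

    import (
        "k8s.io/client-go/rest"
        "k8s.io/client-go/util/flowcontrol"
    )

    func withThrottling(cfg *rest.Config) *rest.Config {
        cfg.QPS = 5    // steady-state requests per second
        cfg.Burst = 10 // short bursts allowed above QPS
        // Equivalent explicit form; a request that finds the bucket empty
        // waits, which client-go logs as "Waited for ... due to
        // client-side throttling".
        cfg.RateLimiter = flowcontrol.NewTokenBucketRateLimiter(cfg.QPS, cfg.Burst)
        return cfg
    }
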
object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.061812 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d8d1851-76f3-46ad-8637-738acb08ea9b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pdwp4\" (UID: \"9d8d1851-76f3-46ad-8637-738acb08ea9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pdwp4" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.079109 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.098747 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.119284 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.138843 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.159764 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.178956 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.199075 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 13 06:47:52 crc kubenswrapper[4644]: E1213 06:47:52.201037 4644 secret.go:188] Couldn't get secret openshift-machine-api/control-plane-machine-set-operator-tls: failed to sync secret cache: timed out waiting for the condition Dec 13 06:47:52 crc kubenswrapper[4644]: E1213 06:47:52.201121 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/29418836-a9b6-42c2-90b1-755ff73fe3fa-control-plane-machine-set-operator-tls podName:29418836-a9b6-42c2-90b1-755ff73fe3fa nodeName:}" failed. No retries permitted until 2025-12-13 06:47:52.701101006 +0000 UTC m=+134.916051839 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "control-plane-machine-set-operator-tls" (UniqueName: "kubernetes.io/secret/29418836-a9b6-42c2-90b1-755ff73fe3fa-control-plane-machine-set-operator-tls") pod "control-plane-machine-set-operator-78cbb6b69f-mrjxs" (UID: "29418836-a9b6-42c2-90b1-755ff73fe3fa") : failed to sync secret cache: timed out waiting for the condition Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.218282 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.243493 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.258880 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.280432 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.299040 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.319658 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.338991 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.359089 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.378519 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.399050 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.419309 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.439089 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.459144 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.478982 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.498539 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.519252 4644 reflector.go:368] Caches populated for *v1.Secret from 
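
The two E1213 records above show a single failed volume operation end to end: the secret informer cache had not synced in time, so MountVolume.SetUp failed, and nestedpendingoperations re-queued it with a delay ("No retries permitted until ... durationBeforeRetry 500ms") that grows on repeated failures; the retry a moment later succeeds once the cache is populated. The apimachinery wait package expresses the same retry shape; only the 500ms starting delay is taken from the log, the other parameters are illustrative:

    // Sketch of the exponential backoff applied to re-queued volume
    // operations; only the 500ms starting delay matches the log above.
    package example

    import (
        "time"

        "k8s.io/apimachinery/pkg/util/wait"
    )

    func mountWithRetry(setUp func() error) error {
        backoff := wait.Backoff{
            Duration: 500 * time.Millisecond, // first retry delay
            Factor:   2.0,                    // delay doubles after each failure
            Steps:    5,                      // give up after five attempts
        }
        return wait.ExponentialBackoff(backoff, func() (bool, error) {
            if err := setUp(); err != nil {
                return false, nil // not done yet; wait out the next backoff step
            }
            return true, nil
        })
    }
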
object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.539209 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.558534 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.578796 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.599066 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.619523 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.638602 4644 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.658380 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.678782 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.699197 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.718248 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.719961 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/29418836-a9b6-42c2-90b1-755ff73fe3fa-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-mrjxs\" (UID: \"29418836-a9b6-42c2-90b1-755ff73fe3fa\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mrjxs" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.722931 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/29418836-a9b6-42c2-90b1-755ff73fe3fa-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-mrjxs\" (UID: \"29418836-a9b6-42c2-90b1-755ff73fe3fa\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mrjxs" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.739593 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.760349 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.779651 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.798502 4644 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.839311 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.859370 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.878859 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.898717 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.933062 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pktkx\" (UniqueName: \"kubernetes.io/projected/901d2740-8e94-4290-8a30-be45a95d5f74-kube-api-access-pktkx\") pod \"service-ca-operator-777779d784-xmrpm\" (UID: \"901d2740-8e94-4290-8a30-be45a95d5f74\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xmrpm" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.951660 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x56tq\" (UniqueName: \"kubernetes.io/projected/9315f9b2-8068-4cbb-88ad-5f4046b3dbdc-kube-api-access-x56tq\") pod \"console-operator-58897d9998-shtgr\" (UID: \"9315f9b2-8068-4cbb-88ad-5f4046b3dbdc\") " pod="openshift-console-operator/console-operator-58897d9998-shtgr" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.970492 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jqgt\" (UniqueName: \"kubernetes.io/projected/f0e9ddf9-d763-42e9-8682-7aa45939292d-kube-api-access-2jqgt\") pod \"openshift-config-operator-7777fb866f-qbqgt\" (UID: \"f0e9ddf9-d763-42e9-8682-7aa45939292d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt" Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.977615 4644 request.go:700] Waited for 1.877256459s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-operator/serviceaccounts/ingress-operator/token Dec 13 06:47:52 crc kubenswrapper[4644]: I1213 06:47:52.990644 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/16dd8123-4c76-4fa6-8792-d1fe4a68b782-bound-sa-token\") pod \"ingress-operator-5b745b69d9-m24dn\" (UID: \"16dd8123-4c76-4fa6-8792-d1fe4a68b782\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.011576 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8mmp\" (UniqueName: \"kubernetes.io/projected/a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50-kube-api-access-v8mmp\") pod \"machine-api-operator-5694c8668f-jjpl8\" (UID: \"a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.033304 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgqs8\" (UniqueName: \"kubernetes.io/projected/a316aeb4-d464-4fc3-b4b7-7df5e00e68be-kube-api-access-bgqs8\") pod \"apiserver-7bbb656c7d-tbrk2\" (UID: 
\"a316aeb4-d464-4fc3-b4b7-7df5e00e68be\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.052210 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcg78\" (UniqueName: \"kubernetes.io/projected/3e906e3d-d501-43fd-baae-8c5b606c7c58-kube-api-access-jcg78\") pod \"apiserver-76f77b778f-8ktwh\" (UID: \"3e906e3d-d501-43fd-baae-8c5b606c7c58\") " pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.057128 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.070162 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cdrk\" (UniqueName: \"kubernetes.io/projected/fde8db32-8ec8-4f55-b401-2a796bc1d353-kube-api-access-9cdrk\") pod \"authentication-operator-69f744f599-6xwbf\" (UID: \"fde8db32-8ec8-4f55-b401-2a796bc1d353\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.092565 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/70f75bc7-a420-41c8-9b46-2cf4039210bd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-lknvc\" (UID: \"70f75bc7-a420-41c8-9b46-2cf4039210bd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lknvc" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.112598 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgtcd\" (UniqueName: \"kubernetes.io/projected/cd4c2102-9555-4eb4-8d7e-91d8c7020a0e-kube-api-access-lgtcd\") pod \"downloads-7954f5f757-dtxqg\" (UID: \"cd4c2102-9555-4eb4-8d7e-91d8c7020a0e\") " pod="openshift-console/downloads-7954f5f757-dtxqg" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.122613 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.134054 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-dtxqg" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.137274 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csgmx\" (UniqueName: \"kubernetes.io/projected/7732124a-a282-433a-83e7-040dd881ac56-kube-api-access-csgmx\") pod \"etcd-operator-b45778765-c69v8\" (UID: \"7732124a-a282-433a-83e7-040dd881ac56\") " pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.142026 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-shtgr" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.155533 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqmnn\" (UniqueName: \"kubernetes.io/projected/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-kube-api-access-tqmnn\") pod \"controller-manager-879f6c89f-mh84f\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.164700 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lknvc" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.172632 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.175679 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vdrk\" (UniqueName: \"kubernetes.io/projected/85a3c5f0-2d32-465d-9c1e-689f35f25507-kube-api-access-4vdrk\") pod \"openshift-controller-manager-operator-756b6f6bc6-lzb4z\" (UID: \"85a3c5f0-2d32-465d-9c1e-689f35f25507\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-lzb4z" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.179780 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.193831 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xd8ml\" (UniqueName: \"kubernetes.io/projected/33355e8a-5a62-49d4-8c71-ab546cdbf141-kube-api-access-xd8ml\") pod \"route-controller-manager-6576b87f9c-bmz5w\" (UID: \"33355e8a-5a62-49d4-8c71-ab546cdbf141\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.197798 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xmrpm" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.198509 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8ktwh"] Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.219017 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5zjn\" (UniqueName: \"kubernetes.io/projected/6c1981b4-cec6-4c20-807a-982199dc5f81-kube-api-access-t5zjn\") pod \"cluster-image-registry-operator-dc59b4c8b-rws8k\" (UID: \"6c1981b4-cec6-4c20-807a-982199dc5f81\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.236238 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlscj\" (UniqueName: \"kubernetes.io/projected/a8d9c95e-97f4-46f8-a3b7-016e7c228f88-kube-api-access-jlscj\") pod \"olm-operator-6b444d44fb-k88nl\" (UID: \"a8d9c95e-97f4-46f8-a3b7-016e7c228f88\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.253239 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7v52\" (UniqueName: \"kubernetes.io/projected/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-kube-api-access-k7v52\") pod \"console-f9d7485db-wfsz4\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.272655 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tt4m6\" (UniqueName: \"kubernetes.io/projected/622db6f9-2148-4569-88a6-f37650895811-kube-api-access-tt4m6\") pod \"cluster-samples-operator-665b6dd947-vgkx8\" (UID: \"622db6f9-2148-4569-88a6-f37650895811\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vgkx8" Dec 13 06:47:53 crc 
kubenswrapper[4644]: I1213 06:47:53.273724 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.293161 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.293754 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbj84\" (UniqueName: \"kubernetes.io/projected/16dd8123-4c76-4fa6-8792-d1fe4a68b782-kube-api-access-vbj84\") pod \"ingress-operator-5b745b69d9-m24dn\" (UID: \"16dd8123-4c76-4fa6-8792-d1fe4a68b782\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.307782 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.316597 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6c1981b4-cec6-4c20-807a-982199dc5f81-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rws8k\" (UID: \"6c1981b4-cec6-4c20-807a-982199dc5f81\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.320731 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-shtgr"] Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.332174 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvhrk\" (UniqueName: \"kubernetes.io/projected/773a896a-a43c-4a09-a6e7-42063bf34606-kube-api-access-qvhrk\") pod \"dns-operator-744455d44c-d7jk5\" (UID: \"773a896a-a43c-4a09-a6e7-42063bf34606\") " pod="openshift-dns-operator/dns-operator-744455d44c-d7jk5" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.353640 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smg29\" (UniqueName: \"kubernetes.io/projected/b9059962-adc6-4278-aead-d07a310b9776-kube-api-access-smg29\") pod \"oauth-openshift-558db77b4-4z6jl\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.360354 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vgkx8" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.377033 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfvmq\" (UniqueName: \"kubernetes.io/projected/8fc32d46-8130-4cd4-a3a6-7e2377a67785-kube-api-access-gfvmq\") pod \"machine-approver-56656f9798-rpj6t\" (UID: \"8fc32d46-8130-4cd4-a3a6-7e2377a67785\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.381656 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lknvc"] Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.383131 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.393967 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxsdp\" (UniqueName: \"kubernetes.io/projected/3e62a18e-fc3a-4859-9a12-cd740f5ee8a1-kube-api-access-qxsdp\") pod \"openshift-apiserver-operator-796bbdcf4f-2h6ps\" (UID: \"3e62a18e-fc3a-4859-9a12-cd740f5ee8a1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2h6ps" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.396105 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.404008 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.411939 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.414046 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqzvh\" (UniqueName: \"kubernetes.io/projected/8eafd0da-eec5-48e4-9ff3-0e8367d338c2-kube-api-access-hqzvh\") pod \"csi-hostpathplugin-hmhwd\" (UID: \"8eafd0da-eec5-48e4-9ff3-0e8367d338c2\") " pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:53 crc kubenswrapper[4644]: W1213 06:47:53.414166 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70f75bc7_a420_41c8_9b46_2cf4039210bd.slice/crio-ccd6c291005eb628466bcea610ef01b0a33b24463e4147022495d363bc03b59e WatchSource:0}: Error finding container ccd6c291005eb628466bcea610ef01b0a33b24463e4147022495d363bc03b59e: Status 404 returned error can't find the container with id ccd6c291005eb628466bcea610ef01b0a33b24463e4147022495d363bc03b59e Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.417042 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-lzb4z" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.428206 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.436452 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8whg\" (UniqueName: \"kubernetes.io/projected/3f1dbe36-7205-4fac-9ce9-0194d29a6bba-kube-api-access-j8whg\") pod \"router-default-5444994796-vfb94\" (UID: \"3f1dbe36-7205-4fac-9ce9-0194d29a6bba\") " pod="openshift-ingress/router-default-5444994796-vfb94" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.447147 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-d7jk5" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.452983 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2h6ps" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.453250 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9d8d1851-76f3-46ad-8637-738acb08ea9b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pdwp4\" (UID: \"9d8d1851-76f3-46ad-8637-738acb08ea9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pdwp4" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.461068 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.476304 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8792f\" (UniqueName: \"kubernetes.io/projected/29418836-a9b6-42c2-90b1-755ff73fe3fa-kube-api-access-8792f\") pod \"control-plane-machine-set-operator-78cbb6b69f-mrjxs\" (UID: \"29418836-a9b6-42c2-90b1-755ff73fe3fa\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mrjxs" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.485805 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-jjpl8"] Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.503529 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532010 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8b5eab9d-beb2-4574-bd14-831322b154d4-webhook-cert\") pod \"packageserver-d55dfcdfc-h6zvh\" (UID: \"8b5eab9d-beb2-4574-bd14-831322b154d4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532075 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz4kx\" (UniqueName: \"kubernetes.io/projected/e05235cb-84ad-45ae-9fd3-02803c0fd752-kube-api-access-bz4kx\") pod \"machine-config-controller-84d6567774-j5lrz\" (UID: \"e05235cb-84ad-45ae-9fd3-02803c0fd752\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532174 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/48b60af5-ee30-4605-8cbf-01c379981b3e-profile-collector-cert\") pod \"catalog-operator-68c6474976-kvg6b\" (UID: \"48b60af5-ee30-4605-8cbf-01c379981b3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532236 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/79103034-e1a4-44b1-bffc-e9edc76da393-registry-certificates\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532258 4644 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/3efe95b6-b728-4044-b3e7-076482b73a63-certs\") pod \"machine-config-server-cvf68\" (UID: \"3efe95b6-b728-4044-b3e7-076482b73a63\") " pod="openshift-machine-config-operator/machine-config-server-cvf68" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532351 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/79103034-e1a4-44b1-bffc-e9edc76da393-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532548 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfpnj\" (UniqueName: \"kubernetes.io/projected/48b60af5-ee30-4605-8cbf-01c379981b3e-kube-api-access-mfpnj\") pod \"catalog-operator-68c6474976-kvg6b\" (UID: \"48b60af5-ee30-4605-8cbf-01c379981b3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532615 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e05235cb-84ad-45ae-9fd3-02803c0fd752-proxy-tls\") pod \"machine-config-controller-84d6567774-j5lrz\" (UID: \"e05235cb-84ad-45ae-9fd3-02803c0fd752\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532630 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cf016b2d-7155-49d8-9675-567dc1cb1dfe-metrics-tls\") pod \"dns-default-hn5l6\" (UID: \"cf016b2d-7155-49d8-9675-567dc1cb1dfe\") " pod="openshift-dns/dns-default-hn5l6" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532673 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/09933a3a-4eb6-466f-af47-f3d4f6bb2709-auth-proxy-config\") pod \"machine-config-operator-74547568cd-gftmb\" (UID: \"09933a3a-4eb6-466f-af47-f3d4f6bb2709\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532704 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtws7\" (UniqueName: \"kubernetes.io/projected/efc4a9ab-d47c-4425-aecf-746386c5ee97-kube-api-access-qtws7\") pod \"migrator-59844c95c7-9nmb7\" (UID: \"efc4a9ab-d47c-4425-aecf-746386c5ee97\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9nmb7" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532745 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzgms\" (UniqueName: \"kubernetes.io/projected/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-kube-api-access-vzgms\") pod \"marketplace-operator-79b997595-crmqj\" (UID: \"5de2f44b-564a-461c-b9e1-b4b306d8ecb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532784 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/48b60af5-ee30-4605-8cbf-01c379981b3e-srv-cert\") pod \"catalog-operator-68c6474976-kvg6b\" (UID: \"48b60af5-ee30-4605-8cbf-01c379981b3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532851 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkssc\" (UniqueName: \"kubernetes.io/projected/baa88468-def9-4705-8a15-a97e14eba154-kube-api-access-tkssc\") pod \"multus-admission-controller-857f4d67dd-wqmsp\" (UID: \"baa88468-def9-4705-8a15-a97e14eba154\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wqmsp" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532877 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbkff\" (UniqueName: \"kubernetes.io/projected/17cbd4ea-f855-40e4-aa42-fa46efa2a393-kube-api-access-hbkff\") pod \"package-server-manager-789f6589d5-6kp77\" (UID: \"17cbd4ea-f855-40e4-aa42-fa46efa2a393\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6kp77" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532914 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/09933a3a-4eb6-466f-af47-f3d4f6bb2709-proxy-tls\") pod \"machine-config-operator-74547568cd-gftmb\" (UID: \"09933a3a-4eb6-466f-af47-f3d4f6bb2709\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532935 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/460ee6fa-709e-4281-b1a7-3ca5f795d684-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bgprm\" (UID: \"460ee6fa-709e-4281-b1a7-3ca5f795d684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgprm" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.532987 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/3efe95b6-b728-4044-b3e7-076482b73a63-node-bootstrap-token\") pod \"machine-config-server-cvf68\" (UID: \"3efe95b6-b728-4044-b3e7-076482b73a63\") " pod="openshift-machine-config-operator/machine-config-server-cvf68" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.533026 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e05235cb-84ad-45ae-9fd3-02803c0fd752-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-j5lrz\" (UID: \"e05235cb-84ad-45ae-9fd3-02803c0fd752\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.533908 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f8aab518-1735-49f5-98af-68d01e096132-secret-volume\") pod \"collect-profiles-29426805-wg25g\" (UID: \"f8aab518-1735-49f5-98af-68d01e096132\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 
06:47:53.533939 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7drp\" (UniqueName: \"kubernetes.io/projected/358d9360-7d08-47de-b8f5-81205f934c81-kube-api-access-g7drp\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdd25\" (UID: \"358d9360-7d08-47de-b8f5-81205f934c81\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdd25" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.533973 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8b5eab9d-beb2-4574-bd14-831322b154d4-apiservice-cert\") pod \"packageserver-d55dfcdfc-h6zvh\" (UID: \"8b5eab9d-beb2-4574-bd14-831322b154d4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534015 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/460ee6fa-709e-4281-b1a7-3ca5f795d684-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bgprm\" (UID: \"460ee6fa-709e-4281-b1a7-3ca5f795d684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgprm" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534031 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsfjs\" (UniqueName: \"kubernetes.io/projected/f8aab518-1735-49f5-98af-68d01e096132-kube-api-access-lsfjs\") pod \"collect-profiles-29426805-wg25g\" (UID: \"f8aab518-1735-49f5-98af-68d01e096132\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534045 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpn5z\" (UniqueName: \"kubernetes.io/projected/3efe95b6-b728-4044-b3e7-076482b73a63-kube-api-access-kpn5z\") pod \"machine-config-server-cvf68\" (UID: \"3efe95b6-b728-4044-b3e7-076482b73a63\") " pod="openshift-machine-config-operator/machine-config-server-cvf68" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534085 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhktw\" (UniqueName: \"kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-kube-api-access-zhktw\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534122 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-crmqj\" (UID: \"5de2f44b-564a-461c-b9e1-b4b306d8ecb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534151 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkwxr\" (UniqueName: \"kubernetes.io/projected/a8b48e2c-cb80-4c41-bd8b-8a57190f5cca-kube-api-access-vkwxr\") pod \"service-ca-9c57cc56f-l4cnx\" (UID: \"a8b48e2c-cb80-4c41-bd8b-8a57190f5cca\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-l4cnx" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534176 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8b5eab9d-beb2-4574-bd14-831322b154d4-tmpfs\") pod \"packageserver-d55dfcdfc-h6zvh\" (UID: \"8b5eab9d-beb2-4574-bd14-831322b154d4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534192 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfjth\" (UniqueName: \"kubernetes.io/projected/cf016b2d-7155-49d8-9675-567dc1cb1dfe-kube-api-access-kfjth\") pod \"dns-default-hn5l6\" (UID: \"cf016b2d-7155-49d8-9675-567dc1cb1dfe\") " pod="openshift-dns/dns-default-hn5l6" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534213 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-crmqj\" (UID: \"5de2f44b-564a-461c-b9e1-b4b306d8ecb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534241 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/79103034-e1a4-44b1-bffc-e9edc76da393-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534267 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a8b48e2c-cb80-4c41-bd8b-8a57190f5cca-signing-cabundle\") pod \"service-ca-9c57cc56f-l4cnx\" (UID: \"a8b48e2c-cb80-4c41-bd8b-8a57190f5cca\") " pod="openshift-service-ca/service-ca-9c57cc56f-l4cnx" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534329 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/17cbd4ea-f855-40e4-aa42-fa46efa2a393-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-6kp77\" (UID: \"17cbd4ea-f855-40e4-aa42-fa46efa2a393\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6kp77" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534369 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/09933a3a-4eb6-466f-af47-f3d4f6bb2709-images\") pod \"machine-config-operator-74547568cd-gftmb\" (UID: \"09933a3a-4eb6-466f-af47-f3d4f6bb2709\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534404 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a8b48e2c-cb80-4c41-bd8b-8a57190f5cca-signing-key\") pod \"service-ca-9c57cc56f-l4cnx\" (UID: \"a8b48e2c-cb80-4c41-bd8b-8a57190f5cca\") " pod="openshift-service-ca/service-ca-9c57cc56f-l4cnx" Dec 13 06:47:53 
crc kubenswrapper[4644]: I1213 06:47:53.534471 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxllh\" (UniqueName: \"kubernetes.io/projected/8b5eab9d-beb2-4574-bd14-831322b154d4-kube-api-access-rxllh\") pod \"packageserver-d55dfcdfc-h6zvh\" (UID: \"8b5eab9d-beb2-4574-bd14-831322b154d4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534498 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f8aab518-1735-49f5-98af-68d01e096132-config-volume\") pod \"collect-profiles-29426805-wg25g\" (UID: \"f8aab518-1735-49f5-98af-68d01e096132\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534522 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-registry-tls\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534536 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/460ee6fa-709e-4281-b1a7-3ca5f795d684-config\") pod \"kube-controller-manager-operator-78b949d7b-bgprm\" (UID: \"460ee6fa-709e-4281-b1a7-3ca5f795d684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgprm" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534552 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/baa88468-def9-4705-8a15-a97e14eba154-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-wqmsp\" (UID: \"baa88468-def9-4705-8a15-a97e14eba154\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wqmsp" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534576 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534593 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cf016b2d-7155-49d8-9675-567dc1cb1dfe-config-volume\") pod \"dns-default-hn5l6\" (UID: \"cf016b2d-7155-49d8-9675-567dc1cb1dfe\") " pod="openshift-dns/dns-default-hn5l6" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534610 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2hgn\" (UniqueName: \"kubernetes.io/projected/09933a3a-4eb6-466f-af47-f3d4f6bb2709-kube-api-access-q2hgn\") pod \"machine-config-operator-74547568cd-gftmb\" (UID: \"09933a3a-4eb6-466f-af47-f3d4f6bb2709\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb" Dec 13 06:47:53 crc 
kubenswrapper[4644]: I1213 06:47:53.534626 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/358d9360-7d08-47de-b8f5-81205f934c81-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdd25\" (UID: \"358d9360-7d08-47de-b8f5-81205f934c81\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdd25" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534656 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/79103034-e1a4-44b1-bffc-e9edc76da393-trusted-ca\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534670 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-bound-sa-token\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.534763 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/358d9360-7d08-47de-b8f5-81205f934c81-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdd25\" (UID: \"358d9360-7d08-47de-b8f5-81205f934c81\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdd25" Dec 13 06:47:53 crc kubenswrapper[4644]: E1213 06:47:53.539262 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:54.039250324 +0000 UTC m=+136.254201157 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.547363 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-vfb94" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.552711 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-dtxqg"] Dec 13 06:47:53 crc kubenswrapper[4644]: W1213 06:47:53.557994 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda092bf2e_2ca3_4e02_b0d3_09d4cedb4e50.slice/crio-4863a58a21e89127e693dccc1781b1df89898907af319947aaac3199026f69d9 WatchSource:0}: Error finding container 4863a58a21e89127e693dccc1781b1df89898907af319947aaac3199026f69d9: Status 404 returned error can't find the container with id 4863a58a21e89127e693dccc1781b1df89898907af319947aaac3199026f69d9 Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.568604 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt"] Dec 13 06:47:53 crc kubenswrapper[4644]: W1213 06:47:53.570316 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcd4c2102_9555_4eb4_8d7e_91d8c7020a0e.slice/crio-fd1c439af3588d9bd8e9a6a834d67a5d687977a8ffcc9a04150338df8c1fd8d5 WatchSource:0}: Error finding container fd1c439af3588d9bd8e9a6a834d67a5d687977a8ffcc9a04150338df8c1fd8d5: Status 404 returned error can't find the container with id fd1c439af3588d9bd8e9a6a834d67a5d687977a8ffcc9a04150338df8c1fd8d5 Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.579957 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pdwp4" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.593815 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vgkx8"] Dec 13 06:47:53 crc kubenswrapper[4644]: W1213 06:47:53.604780 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf0e9ddf9_d763_42e9_8682_7aa45939292d.slice/crio-600bb30dd979add1cc1c8804ea6df89af3fc9d3a41cc31199fd20bfa28ab776d WatchSource:0}: Error finding container 600bb30dd979add1cc1c8804ea6df89af3fc9d3a41cc31199fd20bfa28ab776d: Status 404 returned error can't find the container with id 600bb30dd979add1cc1c8804ea6df89af3fc9d3a41cc31199fd20bfa28ab776d Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.608143 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-c69v8"] Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.614954 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mrjxs" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.641369 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:53 crc kubenswrapper[4644]: E1213 06:47:53.641651 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:54.141626148 +0000 UTC m=+136.356576982 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.641702 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzgms\" (UniqueName: \"kubernetes.io/projected/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-kube-api-access-vzgms\") pod \"marketplace-operator-79b997595-crmqj\" (UID: \"5de2f44b-564a-461c-b9e1-b4b306d8ecb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.641750 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/48b60af5-ee30-4605-8cbf-01c379981b3e-srv-cert\") pod \"catalog-operator-68c6474976-kvg6b\" (UID: \"48b60af5-ee30-4605-8cbf-01c379981b3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.641777 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkssc\" (UniqueName: \"kubernetes.io/projected/baa88468-def9-4705-8a15-a97e14eba154-kube-api-access-tkssc\") pod \"multus-admission-controller-857f4d67dd-wqmsp\" (UID: \"baa88468-def9-4705-8a15-a97e14eba154\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wqmsp" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.641807 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbkff\" (UniqueName: \"kubernetes.io/projected/17cbd4ea-f855-40e4-aa42-fa46efa2a393-kube-api-access-hbkff\") pod \"package-server-manager-789f6589d5-6kp77\" (UID: \"17cbd4ea-f855-40e4-aa42-fa46efa2a393\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6kp77" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.641824 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/09933a3a-4eb6-466f-af47-f3d4f6bb2709-proxy-tls\") pod \"machine-config-operator-74547568cd-gftmb\" (UID: \"09933a3a-4eb6-466f-af47-f3d4f6bb2709\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 
06:47:53.641854 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/460ee6fa-709e-4281-b1a7-3ca5f795d684-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bgprm\" (UID: \"460ee6fa-709e-4281-b1a7-3ca5f795d684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgprm" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.641875 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/3efe95b6-b728-4044-b3e7-076482b73a63-node-bootstrap-token\") pod \"machine-config-server-cvf68\" (UID: \"3efe95b6-b728-4044-b3e7-076482b73a63\") " pod="openshift-machine-config-operator/machine-config-server-cvf68" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.641897 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e05235cb-84ad-45ae-9fd3-02803c0fd752-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-j5lrz\" (UID: \"e05235cb-84ad-45ae-9fd3-02803c0fd752\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.641917 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f8aab518-1735-49f5-98af-68d01e096132-secret-volume\") pod \"collect-profiles-29426805-wg25g\" (UID: \"f8aab518-1735-49f5-98af-68d01e096132\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.641933 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7drp\" (UniqueName: \"kubernetes.io/projected/358d9360-7d08-47de-b8f5-81205f934c81-kube-api-access-g7drp\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdd25\" (UID: \"358d9360-7d08-47de-b8f5-81205f934c81\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdd25" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.641954 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8b5eab9d-beb2-4574-bd14-831322b154d4-apiservice-cert\") pod \"packageserver-d55dfcdfc-h6zvh\" (UID: \"8b5eab9d-beb2-4574-bd14-831322b154d4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.641984 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/460ee6fa-709e-4281-b1a7-3ca5f795d684-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bgprm\" (UID: \"460ee6fa-709e-4281-b1a7-3ca5f795d684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgprm" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642001 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsfjs\" (UniqueName: \"kubernetes.io/projected/f8aab518-1735-49f5-98af-68d01e096132-kube-api-access-lsfjs\") pod \"collect-profiles-29426805-wg25g\" (UID: \"f8aab518-1735-49f5-98af-68d01e096132\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" Dec 13 06:47:53 crc kubenswrapper[4644]: 
I1213 06:47:53.642015 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpn5z\" (UniqueName: \"kubernetes.io/projected/3efe95b6-b728-4044-b3e7-076482b73a63-kube-api-access-kpn5z\") pod \"machine-config-server-cvf68\" (UID: \"3efe95b6-b728-4044-b3e7-076482b73a63\") " pod="openshift-machine-config-operator/machine-config-server-cvf68" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642035 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c2506a59-7a09-4c47-aead-bdb7aaec207f-cert\") pod \"ingress-canary-h4qgz\" (UID: \"c2506a59-7a09-4c47-aead-bdb7aaec207f\") " pod="openshift-ingress-canary/ingress-canary-h4qgz" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642067 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhktw\" (UniqueName: \"kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-kube-api-access-zhktw\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642110 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-crmqj\" (UID: \"5de2f44b-564a-461c-b9e1-b4b306d8ecb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642136 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkwxr\" (UniqueName: \"kubernetes.io/projected/a8b48e2c-cb80-4c41-bd8b-8a57190f5cca-kube-api-access-vkwxr\") pod \"service-ca-9c57cc56f-l4cnx\" (UID: \"a8b48e2c-cb80-4c41-bd8b-8a57190f5cca\") " pod="openshift-service-ca/service-ca-9c57cc56f-l4cnx" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642157 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8b5eab9d-beb2-4574-bd14-831322b154d4-tmpfs\") pod \"packageserver-d55dfcdfc-h6zvh\" (UID: \"8b5eab9d-beb2-4574-bd14-831322b154d4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642175 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfjth\" (UniqueName: \"kubernetes.io/projected/cf016b2d-7155-49d8-9675-567dc1cb1dfe-kube-api-access-kfjth\") pod \"dns-default-hn5l6\" (UID: \"cf016b2d-7155-49d8-9675-567dc1cb1dfe\") " pod="openshift-dns/dns-default-hn5l6" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642203 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-crmqj\" (UID: \"5de2f44b-564a-461c-b9e1-b4b306d8ecb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642221 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/79103034-e1a4-44b1-bffc-e9edc76da393-ca-trust-extracted\") pod 
\"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642242 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a8b48e2c-cb80-4c41-bd8b-8a57190f5cca-signing-cabundle\") pod \"service-ca-9c57cc56f-l4cnx\" (UID: \"a8b48e2c-cb80-4c41-bd8b-8a57190f5cca\") " pod="openshift-service-ca/service-ca-9c57cc56f-l4cnx" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642262 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/17cbd4ea-f855-40e4-aa42-fa46efa2a393-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-6kp77\" (UID: \"17cbd4ea-f855-40e4-aa42-fa46efa2a393\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6kp77" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642308 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/09933a3a-4eb6-466f-af47-f3d4f6bb2709-images\") pod \"machine-config-operator-74547568cd-gftmb\" (UID: \"09933a3a-4eb6-466f-af47-f3d4f6bb2709\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642340 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a8b48e2c-cb80-4c41-bd8b-8a57190f5cca-signing-key\") pod \"service-ca-9c57cc56f-l4cnx\" (UID: \"a8b48e2c-cb80-4c41-bd8b-8a57190f5cca\") " pod="openshift-service-ca/service-ca-9c57cc56f-l4cnx" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642366 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxllh\" (UniqueName: \"kubernetes.io/projected/8b5eab9d-beb2-4574-bd14-831322b154d4-kube-api-access-rxllh\") pod \"packageserver-d55dfcdfc-h6zvh\" (UID: \"8b5eab9d-beb2-4574-bd14-831322b154d4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642392 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f8aab518-1735-49f5-98af-68d01e096132-config-volume\") pod \"collect-profiles-29426805-wg25g\" (UID: \"f8aab518-1735-49f5-98af-68d01e096132\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642410 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-registry-tls\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642452 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/460ee6fa-709e-4281-b1a7-3ca5f795d684-config\") pod \"kube-controller-manager-operator-78b949d7b-bgprm\" (UID: \"460ee6fa-709e-4281-b1a7-3ca5f795d684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgprm" Dec 13 06:47:53 
crc kubenswrapper[4644]: I1213 06:47:53.642469 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/baa88468-def9-4705-8a15-a97e14eba154-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-wqmsp\" (UID: \"baa88468-def9-4705-8a15-a97e14eba154\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wqmsp" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642504 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642530 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cf016b2d-7155-49d8-9675-567dc1cb1dfe-config-volume\") pod \"dns-default-hn5l6\" (UID: \"cf016b2d-7155-49d8-9675-567dc1cb1dfe\") " pod="openshift-dns/dns-default-hn5l6" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642546 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2hgn\" (UniqueName: \"kubernetes.io/projected/09933a3a-4eb6-466f-af47-f3d4f6bb2709-kube-api-access-q2hgn\") pod \"machine-config-operator-74547568cd-gftmb\" (UID: \"09933a3a-4eb6-466f-af47-f3d4f6bb2709\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642573 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/358d9360-7d08-47de-b8f5-81205f934c81-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdd25\" (UID: \"358d9360-7d08-47de-b8f5-81205f934c81\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdd25" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642612 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/79103034-e1a4-44b1-bffc-e9edc76da393-trusted-ca\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642626 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-bound-sa-token\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642655 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvl2k\" (UniqueName: \"kubernetes.io/projected/c2506a59-7a09-4c47-aead-bdb7aaec207f-kube-api-access-pvl2k\") pod \"ingress-canary-h4qgz\" (UID: \"c2506a59-7a09-4c47-aead-bdb7aaec207f\") " pod="openshift-ingress-canary/ingress-canary-h4qgz" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642683 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/358d9360-7d08-47de-b8f5-81205f934c81-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdd25\" (UID: \"358d9360-7d08-47de-b8f5-81205f934c81\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdd25" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642706 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8b5eab9d-beb2-4574-bd14-831322b154d4-webhook-cert\") pod \"packageserver-d55dfcdfc-h6zvh\" (UID: \"8b5eab9d-beb2-4574-bd14-831322b154d4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642724 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz4kx\" (UniqueName: \"kubernetes.io/projected/e05235cb-84ad-45ae-9fd3-02803c0fd752-kube-api-access-bz4kx\") pod \"machine-config-controller-84d6567774-j5lrz\" (UID: \"e05235cb-84ad-45ae-9fd3-02803c0fd752\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642768 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/48b60af5-ee30-4605-8cbf-01c379981b3e-profile-collector-cert\") pod \"catalog-operator-68c6474976-kvg6b\" (UID: \"48b60af5-ee30-4605-8cbf-01c379981b3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642788 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/79103034-e1a4-44b1-bffc-e9edc76da393-registry-certificates\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642827 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/3efe95b6-b728-4044-b3e7-076482b73a63-certs\") pod \"machine-config-server-cvf68\" (UID: \"3efe95b6-b728-4044-b3e7-076482b73a63\") " pod="openshift-machine-config-operator/machine-config-server-cvf68" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642851 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/79103034-e1a4-44b1-bffc-e9edc76da393-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642901 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfpnj\" (UniqueName: \"kubernetes.io/projected/48b60af5-ee30-4605-8cbf-01c379981b3e-kube-api-access-mfpnj\") pod \"catalog-operator-68c6474976-kvg6b\" (UID: \"48b60af5-ee30-4605-8cbf-01c379981b3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642920 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e05235cb-84ad-45ae-9fd3-02803c0fd752-proxy-tls\") pod 
\"machine-config-controller-84d6567774-j5lrz\" (UID: \"e05235cb-84ad-45ae-9fd3-02803c0fd752\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642935 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cf016b2d-7155-49d8-9675-567dc1cb1dfe-metrics-tls\") pod \"dns-default-hn5l6\" (UID: \"cf016b2d-7155-49d8-9675-567dc1cb1dfe\") " pod="openshift-dns/dns-default-hn5l6" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642949 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/09933a3a-4eb6-466f-af47-f3d4f6bb2709-auth-proxy-config\") pod \"machine-config-operator-74547568cd-gftmb\" (UID: \"09933a3a-4eb6-466f-af47-f3d4f6bb2709\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.642976 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtws7\" (UniqueName: \"kubernetes.io/projected/efc4a9ab-d47c-4425-aecf-746386c5ee97-kube-api-access-qtws7\") pod \"migrator-59844c95c7-9nmb7\" (UID: \"efc4a9ab-d47c-4425-aecf-746386c5ee97\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9nmb7" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.646273 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e05235cb-84ad-45ae-9fd3-02803c0fd752-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-j5lrz\" (UID: \"e05235cb-84ad-45ae-9fd3-02803c0fd752\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.647512 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/79103034-e1a4-44b1-bffc-e9edc76da393-ca-trust-extracted\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.648109 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a8b48e2c-cb80-4c41-bd8b-8a57190f5cca-signing-cabundle\") pod \"service-ca-9c57cc56f-l4cnx\" (UID: \"a8b48e2c-cb80-4c41-bd8b-8a57190f5cca\") " pod="openshift-service-ca/service-ca-9c57cc56f-l4cnx" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.649161 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xmrpm"] Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.650212 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-crmqj\" (UID: \"5de2f44b-564a-461c-b9e1-b4b306d8ecb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.650623 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/79103034-e1a4-44b1-bffc-e9edc76da393-trusted-ca\") pod 
\"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.651301 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/09933a3a-4eb6-466f-af47-f3d4f6bb2709-images\") pod \"machine-config-operator-74547568cd-gftmb\" (UID: \"09933a3a-4eb6-466f-af47-f3d4f6bb2709\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.655673 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8b5eab9d-beb2-4574-bd14-831322b154d4-tmpfs\") pod \"packageserver-d55dfcdfc-h6zvh\" (UID: \"8b5eab9d-beb2-4574-bd14-831322b154d4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.656164 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-crmqj\" (UID: \"5de2f44b-564a-461c-b9e1-b4b306d8ecb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" Dec 13 06:47:53 crc kubenswrapper[4644]: E1213 06:47:53.656622 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:54.156605299 +0000 UTC m=+136.371556132 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.658274 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f8aab518-1735-49f5-98af-68d01e096132-config-volume\") pod \"collect-profiles-29426805-wg25g\" (UID: \"f8aab518-1735-49f5-98af-68d01e096132\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.661269 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cf016b2d-7155-49d8-9675-567dc1cb1dfe-config-volume\") pod \"dns-default-hn5l6\" (UID: \"cf016b2d-7155-49d8-9675-567dc1cb1dfe\") " pod="openshift-dns/dns-default-hn5l6" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.661403 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/79103034-e1a4-44b1-bffc-e9edc76da393-registry-certificates\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.662079 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/460ee6fa-709e-4281-b1a7-3ca5f795d684-config\") pod \"kube-controller-manager-operator-78b949d7b-bgprm\" (UID: \"460ee6fa-709e-4281-b1a7-3ca5f795d684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgprm" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.663825 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/460ee6fa-709e-4281-b1a7-3ca5f795d684-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bgprm\" (UID: \"460ee6fa-709e-4281-b1a7-3ca5f795d684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgprm" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.664178 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/358d9360-7d08-47de-b8f5-81205f934c81-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdd25\" (UID: \"358d9360-7d08-47de-b8f5-81205f934c81\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdd25" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.665531 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2"] Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.665976 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f8aab518-1735-49f5-98af-68d01e096132-secret-volume\") pod \"collect-profiles-29426805-wg25g\" (UID: \"f8aab518-1735-49f5-98af-68d01e096132\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.667006 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/358d9360-7d08-47de-b8f5-81205f934c81-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdd25\" (UID: \"358d9360-7d08-47de-b8f5-81205f934c81\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdd25" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.668144 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.669976 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/48b60af5-ee30-4605-8cbf-01c379981b3e-profile-collector-cert\") pod \"catalog-operator-68c6474976-kvg6b\" (UID: \"48b60af5-ee30-4605-8cbf-01c379981b3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.670331 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/09933a3a-4eb6-466f-af47-f3d4f6bb2709-proxy-tls\") pod \"machine-config-operator-74547568cd-gftmb\" (UID: \"09933a3a-4eb6-466f-af47-f3d4f6bb2709\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.668611 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a8b48e2c-cb80-4c41-bd8b-8a57190f5cca-signing-key\") pod \"service-ca-9c57cc56f-l4cnx\" (UID: \"a8b48e2c-cb80-4c41-bd8b-8a57190f5cca\") " pod="openshift-service-ca/service-ca-9c57cc56f-l4cnx" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.670931 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/17cbd4ea-f855-40e4-aa42-fa46efa2a393-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-6kp77\" (UID: \"17cbd4ea-f855-40e4-aa42-fa46efa2a393\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6kp77" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.671116 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8b5eab9d-beb2-4574-bd14-831322b154d4-webhook-cert\") pod \"packageserver-d55dfcdfc-h6zvh\" (UID: \"8b5eab9d-beb2-4574-bd14-831322b154d4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.671465 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-registry-tls\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.671807 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/09933a3a-4eb6-466f-af47-f3d4f6bb2709-auth-proxy-config\") pod \"machine-config-operator-74547568cd-gftmb\" (UID: \"09933a3a-4eb6-466f-af47-f3d4f6bb2709\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.672272 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/79103034-e1a4-44b1-bffc-e9edc76da393-installation-pull-secrets\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.673211 4644 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cf016b2d-7155-49d8-9675-567dc1cb1dfe-metrics-tls\") pod \"dns-default-hn5l6\" (UID: \"cf016b2d-7155-49d8-9675-567dc1cb1dfe\") " pod="openshift-dns/dns-default-hn5l6" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.674206 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/48b60af5-ee30-4605-8cbf-01c379981b3e-srv-cert\") pod \"catalog-operator-68c6474976-kvg6b\" (UID: \"48b60af5-ee30-4605-8cbf-01c379981b3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.674216 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/3efe95b6-b728-4044-b3e7-076482b73a63-certs\") pod \"machine-config-server-cvf68\" (UID: \"3efe95b6-b728-4044-b3e7-076482b73a63\") " pod="openshift-machine-config-operator/machine-config-server-cvf68" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.675215 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/3efe95b6-b728-4044-b3e7-076482b73a63-node-bootstrap-token\") pod \"machine-config-server-cvf68\" (UID: \"3efe95b6-b728-4044-b3e7-076482b73a63\") " pod="openshift-machine-config-operator/machine-config-server-cvf68" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.675964 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzgms\" (UniqueName: \"kubernetes.io/projected/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-kube-api-access-vzgms\") pod \"marketplace-operator-79b997595-crmqj\" (UID: \"5de2f44b-564a-461c-b9e1-b4b306d8ecb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.678892 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8b5eab9d-beb2-4574-bd14-831322b154d4-apiservice-cert\") pod \"packageserver-d55dfcdfc-h6zvh\" (UID: \"8b5eab9d-beb2-4574-bd14-831322b154d4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.678995 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e05235cb-84ad-45ae-9fd3-02803c0fd752-proxy-tls\") pod \"machine-config-controller-84d6567774-j5lrz\" (UID: \"e05235cb-84ad-45ae-9fd3-02803c0fd752\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.679717 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/baa88468-def9-4705-8a15-a97e14eba154-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-wqmsp\" (UID: \"baa88468-def9-4705-8a15-a97e14eba154\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wqmsp" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.696122 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfjth\" (UniqueName: \"kubernetes.io/projected/cf016b2d-7155-49d8-9675-567dc1cb1dfe-kube-api-access-kfjth\") pod \"dns-default-hn5l6\" (UID: \"cf016b2d-7155-49d8-9675-567dc1cb1dfe\") " pod="openshift-dns/dns-default-hn5l6" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.712172 4644 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/460ee6fa-709e-4281-b1a7-3ca5f795d684-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bgprm\" (UID: \"460ee6fa-709e-4281-b1a7-3ca5f795d684\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgprm" Dec 13 06:47:53 crc kubenswrapper[4644]: W1213 06:47:53.719535 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda316aeb4_d464_4fc3_b4b7_7df5e00e68be.slice/crio-31be6c1eba70530d5413c5c15dbd0023b6104000e5ac2d7fea747cbca3f8b7dd WatchSource:0}: Error finding container 31be6c1eba70530d5413c5c15dbd0023b6104000e5ac2d7fea747cbca3f8b7dd: Status 404 returned error can't find the container with id 31be6c1eba70530d5413c5c15dbd0023b6104000e5ac2d7fea747cbca3f8b7dd Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.730231 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6xwbf"] Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.741065 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mh84f"] Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.742028 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtws7\" (UniqueName: \"kubernetes.io/projected/efc4a9ab-d47c-4425-aecf-746386c5ee97-kube-api-access-qtws7\") pod \"migrator-59844c95c7-9nmb7\" (UID: \"efc4a9ab-d47c-4425-aecf-746386c5ee97\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9nmb7" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.745732 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.745952 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvl2k\" (UniqueName: \"kubernetes.io/projected/c2506a59-7a09-4c47-aead-bdb7aaec207f-kube-api-access-pvl2k\") pod \"ingress-canary-h4qgz\" (UID: \"c2506a59-7a09-4c47-aead-bdb7aaec207f\") " pod="openshift-ingress-canary/ingress-canary-h4qgz" Dec 13 06:47:53 crc kubenswrapper[4644]: E1213 06:47:53.746133 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:54.246109498 +0000 UTC m=+136.461060330 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.746324 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c2506a59-7a09-4c47-aead-bdb7aaec207f-cert\") pod \"ingress-canary-h4qgz\" (UID: \"c2506a59-7a09-4c47-aead-bdb7aaec207f\") " pod="openshift-ingress-canary/ingress-canary-h4qgz" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.746452 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: E1213 06:47:53.746737 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:54.246727469 +0000 UTC m=+136.461678303 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.755200 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c2506a59-7a09-4c47-aead-bdb7aaec207f-cert\") pod \"ingress-canary-h4qgz\" (UID: \"c2506a59-7a09-4c47-aead-bdb7aaec207f\") " pod="openshift-ingress-canary/ingress-canary-h4qgz" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.758027 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsfjs\" (UniqueName: \"kubernetes.io/projected/f8aab518-1735-49f5-98af-68d01e096132-kube-api-access-lsfjs\") pod \"collect-profiles-29426805-wg25g\" (UID: \"f8aab518-1735-49f5-98af-68d01e096132\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.779069 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkssc\" (UniqueName: \"kubernetes.io/projected/baa88468-def9-4705-8a15-a97e14eba154-kube-api-access-tkssc\") pod \"multus-admission-controller-857f4d67dd-wqmsp\" (UID: \"baa88468-def9-4705-8a15-a97e14eba154\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wqmsp" Dec 13 06:47:53 crc kubenswrapper[4644]: W1213 06:47:53.791081 4644 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfde8db32_8ec8_4f55_b401_2a796bc1d353.slice/crio-b1fda8ee66968f57ef877b608c79288047ea28d6023ea06ef4f71682de8970a9 WatchSource:0}: Error finding container b1fda8ee66968f57ef877b608c79288047ea28d6023ea06ef4f71682de8970a9: Status 404 returned error can't find the container with id b1fda8ee66968f57ef877b608c79288047ea28d6023ea06ef4f71682de8970a9 Dec 13 06:47:53 crc kubenswrapper[4644]: W1213 06:47:53.794522 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81fbb35d_bcd1_4c92_bc7d_2fcbd17c1d64.slice/crio-6302f161f1f349b08e2b4f03de62c48c9ff6a4180f9325c48b9a1b35bcd5a488 WatchSource:0}: Error finding container 6302f161f1f349b08e2b4f03de62c48c9ff6a4180f9325c48b9a1b35bcd5a488: Status 404 returned error can't find the container with id 6302f161f1f349b08e2b4f03de62c48c9ff6a4180f9325c48b9a1b35bcd5a488 Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.797388 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhktw\" (UniqueName: \"kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-kube-api-access-zhktw\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.812266 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpn5z\" (UniqueName: \"kubernetes.io/projected/3efe95b6-b728-4044-b3e7-076482b73a63-kube-api-access-kpn5z\") pod \"machine-config-server-cvf68\" (UID: \"3efe95b6-b728-4044-b3e7-076482b73a63\") " pod="openshift-machine-config-operator/machine-config-server-cvf68" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.851312 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:53 crc kubenswrapper[4644]: E1213 06:47:53.851536 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:54.351520581 +0000 UTC m=+136.566471414 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.851904 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: E1213 06:47:53.852477 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:54.352433739 +0000 UTC m=+136.567384571 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.864208 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkwxr\" (UniqueName: \"kubernetes.io/projected/a8b48e2c-cb80-4c41-bd8b-8a57190f5cca-kube-api-access-vkwxr\") pod \"service-ca-9c57cc56f-l4cnx\" (UID: \"a8b48e2c-cb80-4c41-bd8b-8a57190f5cca\") " pod="openshift-service-ca/service-ca-9c57cc56f-l4cnx" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.881622 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-d7jk5"] Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.883231 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2h6ps"] Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.884501 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7drp\" (UniqueName: \"kubernetes.io/projected/358d9360-7d08-47de-b8f5-81205f934c81-kube-api-access-g7drp\") pod \"kube-storage-version-migrator-operator-b67b599dd-wdd25\" (UID: \"358d9360-7d08-47de-b8f5-81205f934c81\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdd25" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.894666 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9nmb7" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.895269 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.903790 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbkff\" (UniqueName: \"kubernetes.io/projected/17cbd4ea-f855-40e4-aa42-fa46efa2a393-kube-api-access-hbkff\") pod \"package-server-manager-789f6589d5-6kp77\" (UID: \"17cbd4ea-f855-40e4-aa42-fa46efa2a393\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6kp77" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.905917 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl"] Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.906931 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgprm" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.916642 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w"] Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.920328 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.922831 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k"] Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.923800 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-lzb4z"] Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.924833 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-4z6jl"] Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.940955 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxllh\" (UniqueName: \"kubernetes.io/projected/8b5eab9d-beb2-4574-bd14-831322b154d4-kube-api-access-rxllh\") pod \"packageserver-d55dfcdfc-h6zvh\" (UID: \"8b5eab9d-beb2-4574-bd14-831322b154d4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.946277 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-bound-sa-token\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.947553 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-wqmsp" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.956505 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:53 crc kubenswrapper[4644]: E1213 06:47:53.956934 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:54.456859559 +0000 UTC m=+136.671810392 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.958496 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2hgn\" (UniqueName: \"kubernetes.io/projected/09933a3a-4eb6-466f-af47-f3d4f6bb2709-kube-api-access-q2hgn\") pod \"machine-config-operator-74547568cd-gftmb\" (UID: \"09933a3a-4eb6-466f-af47-f3d4f6bb2709\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.958610 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:53 crc kubenswrapper[4644]: E1213 06:47:53.959651 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:54.459634457 +0000 UTC m=+136.674585290 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.992027 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bz4kx\" (UniqueName: \"kubernetes.io/projected/e05235cb-84ad-45ae-9fd3-02803c0fd752-kube-api-access-bz4kx\") pod \"machine-config-controller-84d6567774-j5lrz\" (UID: \"e05235cb-84ad-45ae-9fd3-02803c0fd752\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.994770 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-hn5l6" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.995604 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-cvf68" Dec 13 06:47:53 crc kubenswrapper[4644]: I1213 06:47:53.996165 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfpnj\" (UniqueName: \"kubernetes.io/projected/48b60af5-ee30-4605-8cbf-01c379981b3e-kube-api-access-mfpnj\") pod \"catalog-operator-68c6474976-kvg6b\" (UID: \"48b60af5-ee30-4605-8cbf-01c379981b3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b" Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.005124 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pdwp4"] Dec 13 06:47:54 crc kubenswrapper[4644]: W1213 06:47:54.011740 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda8d9c95e_97f4_46f8_a3b7_016e7c228f88.slice/crio-a0f4682d55eaa028bc49cf78f03ee7a2a8283c5f51f782182e715ac4f10ed2c8 WatchSource:0}: Error finding container a0f4682d55eaa028bc49cf78f03ee7a2a8283c5f51f782182e715ac4f10ed2c8: Status 404 returned error can't find the container with id a0f4682d55eaa028bc49cf78f03ee7a2a8283c5f51f782182e715ac4f10ed2c8 Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.031549 4644 generic.go:334] "Generic (PLEG): container finished" podID="3e906e3d-d501-43fd-baae-8c5b606c7c58" containerID="a79dba2ec98355dc84ae96c310cc6a0da6049729e2ed3a26faac5cff69325b0d" exitCode=0 Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.032476 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" event={"ID":"3e906e3d-d501-43fd-baae-8c5b606c7c58","Type":"ContainerDied","Data":"a79dba2ec98355dc84ae96c310cc6a0da6049729e2ed3a26faac5cff69325b0d"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.034205 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" event={"ID":"3e906e3d-d501-43fd-baae-8c5b606c7c58","Type":"ContainerStarted","Data":"0ace3179011fce9e03c6661cb1f40d1df5c153eaf385116d384f37bbca6b1d04"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.033428 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvl2k\" (UniqueName: 
\"kubernetes.io/projected/c2506a59-7a09-4c47-aead-bdb7aaec207f-kube-api-access-pvl2k\") pod \"ingress-canary-h4qgz\" (UID: \"c2506a59-7a09-4c47-aead-bdb7aaec207f\") " pod="openshift-ingress-canary/ingress-canary-h4qgz" Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.042931 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" event={"ID":"a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50","Type":"ContainerStarted","Data":"9a0eab8eda8ca03babdc1863bfa9545eeb801e3826afba37a8730f275348c4a2"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.042992 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" event={"ID":"a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50","Type":"ContainerStarted","Data":"4863a58a21e89127e693dccc1781b1df89898907af319947aaac3199026f69d9"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.048712 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.049863 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn"] Dec 13 06:47:54 crc kubenswrapper[4644]: W1213 06:47:54.052256 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d8d1851_76f3_46ad_8637_738acb08ea9b.slice/crio-1605e4fb4cf90c156a05c4a8f53a95e76ee3720c86870c1d3ec750513fed0c5f WatchSource:0}: Error finding container 1605e4fb4cf90c156a05c4a8f53a95e76ee3720c86870c1d3ec750513fed0c5f: Status 404 returned error can't find the container with id 1605e4fb4cf90c156a05c4a8f53a95e76ee3720c86870c1d3ec750513fed0c5f Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.052508 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-shtgr" event={"ID":"9315f9b2-8068-4cbb-88ad-5f4046b3dbdc","Type":"ContainerStarted","Data":"11f543d7fe7bc325e97ff61b0309981ed0aea1b1c253636868520206584d3ce7"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.052559 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-shtgr" event={"ID":"9315f9b2-8068-4cbb-88ad-5f4046b3dbdc","Type":"ContainerStarted","Data":"c5079388ab6368d0e87ddfd71841a4d9fd7b3450110b812c04911ac4abe43d6a"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.053464 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-shtgr" Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.055912 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vgkx8" event={"ID":"622db6f9-2148-4569-88a6-f37650895811","Type":"ContainerStarted","Data":"957d37b96d73d31a4c7c176dec4e9d62de0653a8683bd04f1a822a31396f1753"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.056158 4644 patch_prober.go:28] interesting pod/console-operator-58897d9998-shtgr container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.24:8443/readyz\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body= Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.056208 4644 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-console-operator/console-operator-58897d9998-shtgr" podUID="9315f9b2-8068-4cbb-88ad-5f4046b3dbdc" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.24:8443/readyz\": dial tcp 10.217.0.24:8443: connect: connection refused" Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.063743 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" event={"ID":"a316aeb4-d464-4fc3-b4b7-7df5e00e68be","Type":"ContainerStarted","Data":"31be6c1eba70530d5413c5c15dbd0023b6104000e5ac2d7fea747cbca3f8b7dd"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.065388 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mrjxs"] Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.065516 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:54 crc kubenswrapper[4644]: E1213 06:47:54.065972 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:54.565953368 +0000 UTC m=+136.780904202 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.070981 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" event={"ID":"7732124a-a282-433a-83e7-040dd881ac56","Type":"ContainerStarted","Data":"98f8d15ce11e07ac800742d9600ae0f50d0124d05b74046a743c21d57a544b48"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.071707 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-hmhwd"] Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.080036 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" event={"ID":"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64","Type":"ContainerStarted","Data":"6302f161f1f349b08e2b4f03de62c48c9ff6a4180f9325c48b9a1b35bcd5a488"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.088933 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-wfsz4"] Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.095599 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lknvc" event={"ID":"70f75bc7-a420-41c8-9b46-2cf4039210bd","Type":"ContainerStarted","Data":"f5ec9735a2a42e99dd2df9ee8e5343f3261f3a447226edd43f3013bae3b4865a"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.095641 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lknvc" event={"ID":"70f75bc7-a420-41c8-9b46-2cf4039210bd","Type":"ContainerStarted","Data":"ccd6c291005eb628466bcea610ef01b0a33b24463e4147022495d363bc03b59e"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.104382 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" event={"ID":"fde8db32-8ec8-4f55-b401-2a796bc1d353","Type":"ContainerStarted","Data":"b1fda8ee66968f57ef877b608c79288047ea28d6023ea06ef4f71682de8970a9"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.107647 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xmrpm" event={"ID":"901d2740-8e94-4290-8a30-be45a95d5f74","Type":"ContainerStarted","Data":"f29096e15da4a8e6096056ff70b115b3b2efb0ab6263dacd03255077c649d7fc"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.112692 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-dtxqg" event={"ID":"cd4c2102-9555-4eb4-8d7e-91d8c7020a0e","Type":"ContainerStarted","Data":"c6163698d686de7f226c7f2cd965709b666df6fe763b83a64d8424fae6d325a2"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.112727 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-dtxqg" event={"ID":"cd4c2102-9555-4eb4-8d7e-91d8c7020a0e","Type":"ContainerStarted","Data":"fd1c439af3588d9bd8e9a6a834d67a5d687977a8ffcc9a04150338df8c1fd8d5"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.112901 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-dtxqg" Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.117107 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" event={"ID":"8fc32d46-8130-4cd4-a3a6-7e2377a67785","Type":"ContainerStarted","Data":"dc82418991e79e63bae6b3a641e685e180ea5ac04b9871ade9b14151d6736e93"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.117718 4644 patch_prober.go:28] interesting pod/downloads-7954f5f757-dtxqg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.117770 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dtxqg" podUID="cd4c2102-9555-4eb4-8d7e-91d8c7020a0e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.121511 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-vfb94" event={"ID":"3f1dbe36-7205-4fac-9ce9-0194d29a6bba","Type":"ContainerStarted","Data":"83ed705253bb1a94ec2eeb60d0696669893cb73081504ed320a093b49a38d0e2"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.134049 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt" event={"ID":"f0e9ddf9-d763-42e9-8682-7aa45939292d","Type":"ContainerStarted","Data":"600bb30dd979add1cc1c8804ea6df89af3fc9d3a41cc31199fd20bfa28ab776d"} Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.152226 4644 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb"
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.159082 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-l4cnx"
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.165935 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz"
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.168322 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck"
Dec 13 06:47:54 crc kubenswrapper[4644]: E1213 06:47:54.169545 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:54.669526866 +0000 UTC m=+136.884477699 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.171208 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6kp77"
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.184718 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdd25"
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.200587 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh"
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.236524 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b"
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.270673 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 13 06:47:54 crc kubenswrapper[4644]: E1213 06:47:54.271594 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:54.771578712 +0000 UTC m=+136.986529545 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.284571 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgprm"]
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.301821 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-h4qgz"
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.378041 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck"
Dec 13 06:47:54 crc kubenswrapper[4644]: E1213 06:47:54.379471 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:54.879452907 +0000 UTC m=+137.094403740 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.472527 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g"]
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.480664 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 13 06:47:54 crc kubenswrapper[4644]: E1213 06:47:54.480768 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:54.980751607 +0000 UTC m=+137.195702440 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.480971 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck"
Dec 13 06:47:54 crc kubenswrapper[4644]: E1213 06:47:54.481431 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:54.98142313 +0000 UTC m=+137.196373963 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.531712 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-crmqj"]
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.555611 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-vfb94"
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.558604 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 13 06:47:54 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld
Dec 13 06:47:54 crc kubenswrapper[4644]: [+]process-running ok
Dec 13 06:47:54 crc kubenswrapper[4644]: healthz check failed
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.558672 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.576270 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-wqmsp"]
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.584259 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 13 06:47:54 crc kubenswrapper[4644]: E1213 06:47:54.584845 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:55.084825726 +0000 UTC m=+137.299776559 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.623576 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9nmb7"]
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.686427 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck"
Dec 13 06:47:54 crc kubenswrapper[4644]: E1213 06:47:54.687111 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:55.187095182 +0000 UTC m=+137.402046014 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.697244 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-hn5l6"]
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.787890 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 13 06:47:54 crc kubenswrapper[4644]: E1213 06:47:54.788540 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:55.288523965 +0000 UTC m=+137.503474798 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.792557 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-shtgr" podStartSLOduration=118.792534448 podStartE2EDuration="1m58.792534448s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:54.788847905 +0000 UTC m=+137.003798737" watchObservedRunningTime="2025-12-13 06:47:54.792534448 +0000 UTC m=+137.007485281"
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.812476 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-vfb94" podStartSLOduration=118.812457607 podStartE2EDuration="1m58.812457607s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:54.811487282 +0000 UTC m=+137.026438115" watchObservedRunningTime="2025-12-13 06:47:54.812457607 +0000 UTC m=+137.027408440"
Dec 13 06:47:54 crc kubenswrapper[4644]: W1213 06:47:54.821511 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf016b2d_7155_49d8_9675_567dc1cb1dfe.slice/crio-ab86ba64af792ed69685ab4572e5bec4bd5f9ec21e30aa2753915cbdf19cb480 WatchSource:0}: Error finding container ab86ba64af792ed69685ab4572e5bec4bd5f9ec21e30aa2753915cbdf19cb480: Status 404 returned error can't find the container with id ab86ba64af792ed69685ab4572e5bec4bd5f9ec21e30aa2753915cbdf19cb480
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.871909 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-l4cnx"]
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.889380 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck"
Dec 13 06:47:54 crc kubenswrapper[4644]: E1213 06:47:54.889769 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:55.389755699 +0000 UTC m=+137.604706532 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:54 crc kubenswrapper[4644]: I1213 06:47:54.996912 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 13 06:47:54 crc kubenswrapper[4644]: E1213 06:47:54.997857 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:55.497842143 +0000 UTC m=+137.712792976 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:55 crc kubenswrapper[4644]: W1213 06:47:55.035622 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda8b48e2c_cb80_4c41_bd8b_8a57190f5cca.slice/crio-b69f8c935d13044524aa3edd2783c5e90a4cc50c554074cce883c21dd30c92b4 WatchSource:0}: Error finding container b69f8c935d13044524aa3edd2783c5e90a4cc50c554074cce883c21dd30c92b4: Status 404 returned error can't find the container with id b69f8c935d13044524aa3edd2783c5e90a4cc50c554074cce883c21dd30c92b4
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.100250 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck"
Dec 13 06:47:55 crc kubenswrapper[4644]: E1213 06:47:55.100694 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:55.600683094 +0000 UTC m=+137.815633927 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
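[annotation] The mount/unmount churn above has one root cause: no CSI plugin named kubevirt.io.hostpath-provisioner has registered with the kubelet yet, so every MountVolume.MountDevice for the image-registry PVC and every UnmountVolume.TearDown for the terminated pod 8f668bae-... fails immediately, and nestedpendingoperations defers the next attempt by the logged durationBeforeRetry of 500ms. The Go sketch below is a minimal illustration of that defer-and-retry behavior under stated assumptions; mountDevice, registered, and the fixed 500ms delay are simplifications, not kubelet's actual implementation.

package main

import (
	"errors"
	"fmt"
	"time"
)

// errNotRegistered mirrors the error text in the log: the driver has not yet
// announced itself over the kubelet plugin-registration socket.
var errNotRegistered = errors.New("driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers")

// mountDevice stands in for the CSI attacher's MountDevice: it can only
// succeed once the driver is registered.
func mountDevice(registered func() bool) error {
	if !registered() {
		return errNotRegistered
	}
	return nil
}

func main() {
	start := time.Now()
	// Hypothetical: the csi-hostpathplugin pod finishes registering ~2s in
	// (in the log it starts at 06:47:55 and the errors stop soon after).
	registered := func() bool { return time.Since(start) > 2*time.Second }

	const durationBeforeRetry = 500 * time.Millisecond // matches the log
	for {
		if err := mountDevice(registered); err != nil {
			fmt.Printf("MountVolume.MountDevice failed: %v; no retries permitted for %v\n", err, durationBeforeRetry)
			time.Sleep(durationBeforeRetry)
			continue
		}
		fmt.Println("MountVolume.MountDevice succeeded")
		return
	}
}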
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.141022 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" event={"ID":"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64","Type":"ContainerStarted","Data":"ffd1633bf05c6df1c64bb80b5d05803719fcf2c6c7dab02af4097649fadf7608"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.141314 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f"
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.143000 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" event={"ID":"f8aab518-1735-49f5-98af-68d01e096132","Type":"ContainerStarted","Data":"e0b5e0a611f63aba48e8177e2c5a63451addbaa7f14a79c88162310ef7468dd6"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.146045 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-d7jk5" event={"ID":"773a896a-a43c-4a09-a6e7-42063bf34606","Type":"ContainerStarted","Data":"b962af02fe00cfca650c27a7ef79232d250a70c92aea6b212c2bbc6fc02514aa"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.146077 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-d7jk5" event={"ID":"773a896a-a43c-4a09-a6e7-42063bf34606","Type":"ContainerStarted","Data":"aa32b6d593280679338c9678c21f714761776b914c4de6b5d16435ebb4d3e74e"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.147233 4644 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-mh84f container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body=
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.147307 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" podUID="81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused"
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.153281 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgprm" event={"ID":"460ee6fa-709e-4281-b1a7-3ca5f795d684","Type":"ContainerStarted","Data":"ee229465788b4370273675ebf8452d774243972ebf65bfb771245d93aff35df1"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.177972 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mrjxs" event={"ID":"29418836-a9b6-42c2-90b1-755ff73fe3fa","Type":"ContainerStarted","Data":"ad98d8aad612b0f4f5d3d45dc0eb2f134ff45341e14ab6ead4c099eb8fbd9b71"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.178719 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" podStartSLOduration=119.178702823 podStartE2EDuration="1m59.178702823s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:55.178126008 +0000 UTC m=+137.393076841" watchObservedRunningTime="2025-12-13 06:47:55.178702823 +0000 UTC m=+137.393653656"
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.191335 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9nmb7" event={"ID":"efc4a9ab-d47c-4425-aecf-746386c5ee97","Type":"ContainerStarted","Data":"dbfbcd099dbb10e925479b99e2451eb5e0c98d6e62aeb654979b70c80425c105"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.198029 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" event={"ID":"16dd8123-4c76-4fa6-8792-d1fe4a68b782","Type":"ContainerStarted","Data":"2fa7d2c883b80d6fe9cb42902024655777325fd4c590a45bf66b7346d639e70c"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.198070 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" event={"ID":"16dd8123-4c76-4fa6-8792-d1fe4a68b782","Type":"ContainerStarted","Data":"0427aee84e88457458aefb81434bc76944e8d76277385bbad13902a025fcb050"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.201022 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 13 06:47:55 crc kubenswrapper[4644]: E1213 06:47:55.202258 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:55.702242413 +0000 UTC m=+137.917193247 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.228761 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" event={"ID":"7732124a-a282-433a-83e7-040dd881ac56","Type":"ContainerStarted","Data":"d48cbbd311ec114229177eb80cb3aceb43e4cb6c02f68bbb3cd6e4f01094e90d"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.246911 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-l4cnx" event={"ID":"a8b48e2c-cb80-4c41-bd8b-8a57190f5cca","Type":"ContainerStarted","Data":"b69f8c935d13044524aa3edd2783c5e90a4cc50c554074cce883c21dd30c92b4"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.294074 4644 generic.go:334] "Generic (PLEG): container finished" podID="a316aeb4-d464-4fc3-b4b7-7df5e00e68be" containerID="225b0ce26eca4f399a078a74e6627736b3e27ab6868653ffe8fa643260ddddd4" exitCode=0
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.294314 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" event={"ID":"a316aeb4-d464-4fc3-b4b7-7df5e00e68be","Type":"ContainerDied","Data":"225b0ce26eca4f399a078a74e6627736b3e27ab6868653ffe8fa643260ddddd4"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.310672 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck"
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.312490 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-hn5l6" event={"ID":"cf016b2d-7155-49d8-9675-567dc1cb1dfe","Type":"ContainerStarted","Data":"ab86ba64af792ed69685ab4572e5bec4bd5f9ec21e30aa2753915cbdf19cb480"}
Dec 13 06:47:55 crc kubenswrapper[4644]: E1213 06:47:55.313362 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:55.813347745 +0000 UTC m=+138.028298578 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.340240 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" event={"ID":"8fc32d46-8130-4cd4-a3a6-7e2377a67785","Type":"ContainerStarted","Data":"27f79b8850b6e6c4fc4bc50c344b67f7a1861770c8242bac6c5392352cdce5b1"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.345376 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" event={"ID":"8fc32d46-8130-4cd4-a3a6-7e2377a67785","Type":"ContainerStarted","Data":"389078f897a71f3599cddd1125d80700adb6768ee75b58b7221248aed71aa8aa"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.354220 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-lknvc" podStartSLOduration=119.354194876 podStartE2EDuration="1m59.354194876s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:55.334186977 +0000 UTC m=+137.549137810" watchObservedRunningTime="2025-12-13 06:47:55.354194876 +0000 UTC m=+137.569145729"
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.357376 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" event={"ID":"a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50","Type":"ContainerStarted","Data":"4367a336efb59fbcca6222c986a39bf050c981b096286c37fdcecfffd2c939ea"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.378472 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb"]
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.399286 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" event={"ID":"5de2f44b-564a-461c-b9e1-b4b306d8ecb1","Type":"ContainerStarted","Data":"0382c6b7e3c2c56dcdebc42e8370caaae8c7a207413272139b4465a0dbef8cf1"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.411550 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 13 06:47:55 crc kubenswrapper[4644]: E1213 06:47:55.412931 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:55.912913065 +0000 UTC m=+138.127863898 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.459612 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" event={"ID":"3e906e3d-d501-43fd-baae-8c5b606c7c58","Type":"ContainerStarted","Data":"025d402bd464e73fa0998f3e778dc8efd937f09a5d99dbc009c872a2ef06288e"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.504725 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl" event={"ID":"a8d9c95e-97f4-46f8-a3b7-016e7c228f88","Type":"ContainerStarted","Data":"33a9c2e5633b7bcd1c50725f0488e5733c97ba032f785f0bcf322211fd9edc52"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.505012 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl" event={"ID":"a8d9c95e-97f4-46f8-a3b7-016e7c228f88","Type":"ContainerStarted","Data":"a0f4682d55eaa028bc49cf78f03ee7a2a8283c5f51f782182e715ac4f10ed2c8"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.505908 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl"
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.507328 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-dtxqg" podStartSLOduration=119.507319863 podStartE2EDuration="1m59.507319863s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:55.506786992 +0000 UTC m=+137.721737824" watchObservedRunningTime="2025-12-13 06:47:55.507319863 +0000 UTC m=+137.722270696"
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.513086 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck"
Dec 13 06:47:55 crc kubenswrapper[4644]: E1213 06:47:55.513547 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:56.013535092 +0000 UTC m=+138.228485925 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.548891 4644 generic.go:334] "Generic (PLEG): container finished" podID="f0e9ddf9-d763-42e9-8682-7aa45939292d" containerID="82ce11bed8582e9daf062b8388a9b07c8a943bcfdcf94abb2b3fa610721ada7e" exitCode=0
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.548993 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt" event={"ID":"f0e9ddf9-d763-42e9-8682-7aa45939292d","Type":"ContainerDied","Data":"82ce11bed8582e9daf062b8388a9b07c8a943bcfdcf94abb2b3fa610721ada7e"}
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.549023 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt" event={"ID":"f0e9ddf9-d763-42e9-8682-7aa45939292d","Type":"ContainerStarted","Data":"ab76e77d5330e7a13ec0e3d776c707df8fbd5a1163129a8856f3ea75e70edd28"}
Dec 13 06:47:55 crc kubenswrapper[4644]: W1213 06:47:55.549235 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod09933a3a_4eb6_466f_af47_f3d4f6bb2709.slice/crio-8c4a5234af0f703c5b31cb447e59105e9d0467fc30094cfb5a3980c4089ada3c WatchSource:0}: Error finding container 8c4a5234af0f703c5b31cb447e59105e9d0467fc30094cfb5a3980c4089ada3c: Status 404 returned error can't find the container with id 8c4a5234af0f703c5b31cb447e59105e9d0467fc30094cfb5a3980c4089ada3c
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.549760 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt"
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.571414 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 13 06:47:55 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld
Dec 13 06:47:55 crc kubenswrapper[4644]: [+]process-running ok
Dec 13 06:47:55 crc kubenswrapper[4644]: healthz check failed
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.571484 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.575867 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl"
Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.620108 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
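[annotation] The recurring router-default startup-probe failures above follow the standard aggregated-healthz pattern: the probe GETs one endpoint, each named sub-check reports [+] ok or [-] failed, and any failing check turns the whole response into HTTP 500, which prober.go records as probeResult="failure" until the router's backend-http and has-synced checks pass. A minimal Go sketch of that aggregation under stated assumptions (check names are taken from the log output; the handler is illustrative, not the actual openshift-router code):

package main

import (
	"fmt"
	"net/http"
)

// check is one named sub-check contributing to the aggregated healthz result.
type check struct {
	name string
	ok   func() bool
}

// healthz renders "[+]name ok" / "[-]name failed" lines and returns HTTP 500
// if any sub-check fails, matching the probe output seen in the log.
func healthz(checks []check) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		healthy := true
		body := ""
		for _, c := range checks {
			if c.ok() {
				body += fmt.Sprintf("[+]%s ok\n", c.name)
			} else {
				body += fmt.Sprintf("[-]%s failed: reason withheld\n", c.name)
				healthy = false
			}
		}
		if !healthy {
			body += "healthz check failed\n"
			w.WriteHeader(http.StatusInternalServerError) // kubelet sees "statuscode: 500"
		}
		fmt.Fprint(w, body)
	}
}

func main() {
	http.HandleFunc("/healthz", healthz([]check{
		{"backend-http", func() bool { return false }}, // hypothetical: not yet healthy
		{"has-synced", func() bool { return false }},   // hypothetical: config not synced
		{"process-running", func() bool { return true }},
	}))
	http.ListenAndServe(":8080", nil)
}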
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:55 crc kubenswrapper[4644]: E1213 06:47:55.621357 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:56.12133576 +0000 UTC m=+138.336286593 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.647792 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xmrpm" podStartSLOduration=119.647766086 podStartE2EDuration="1m59.647766086s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:55.647091748 +0000 UTC m=+137.862042582" watchObservedRunningTime="2025-12-13 06:47:55.647766086 +0000 UTC m=+137.862716919" Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.675373 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k" event={"ID":"6c1981b4-cec6-4c20-807a-982199dc5f81","Type":"ContainerStarted","Data":"99a0ff3d596cbf53b35b40cab35664c4119ab1de2e5c97133f0b93ad189090be"} Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.675423 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k" event={"ID":"6c1981b4-cec6-4c20-807a-982199dc5f81","Type":"ContainerStarted","Data":"359c282129b47951d6563d249f43d95b3ef3df5b93efacce1139bc33ca9cf8a1"} Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.677558 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh"] Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.694841 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" event={"ID":"33355e8a-5a62-49d4-8c71-ab546cdbf141","Type":"ContainerStarted","Data":"05e30bf26d1a4e1323853cdfb4869fe4888321c239cf20ef9299c4bb45b67ba1"} Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.695080 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" event={"ID":"33355e8a-5a62-49d4-8c71-ab546cdbf141","Type":"ContainerStarted","Data":"cb2a4eb8f17e723ea45fab3bcf7a12ee30fb789abc62be00d64255deb87fe573"} Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.696099 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.737923 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:55 crc kubenswrapper[4644]: E1213 06:47:55.738255 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:56.23824122 +0000 UTC m=+138.453192053 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.761994 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.774704 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xmrpm" event={"ID":"901d2740-8e94-4290-8a30-be45a95d5f74","Type":"ContainerStarted","Data":"fb2229a9feaebcbf781c6f2f92028368e4b166ecc291617301c5a9644ab16290"} Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.834560 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pdwp4" event={"ID":"9d8d1851-76f3-46ad-8637-738acb08ea9b","Type":"ContainerStarted","Data":"bc51c32db6ab14ef3d430c1cb69b5624c5b93e4f77d2dace9b8439bedf383ace"} Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.834863 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pdwp4" event={"ID":"9d8d1851-76f3-46ad-8637-738acb08ea9b","Type":"ContainerStarted","Data":"1605e4fb4cf90c156a05c4a8f53a95e76ee3720c86870c1d3ec750513fed0c5f"} Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.843083 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:55 crc kubenswrapper[4644]: E1213 06:47:55.844311 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:56.344279884 +0000 UTC m=+138.559230717 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.847416 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-jjpl8" podStartSLOduration=119.847396336 podStartE2EDuration="1m59.847396336s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:55.845822485 +0000 UTC m=+138.060773318" watchObservedRunningTime="2025-12-13 06:47:55.847396336 +0000 UTC m=+138.062347169" Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.863384 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" event={"ID":"8eafd0da-eec5-48e4-9ff3-0e8367d338c2","Type":"ContainerStarted","Data":"3491bb46a79f18dc642a9e748c41a2c55bfa4275d87c9ce9ff978fa861b7723a"} Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.887088 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6kp77"] Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.887130 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdd25"] Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.902265 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2h6ps" event={"ID":"3e62a18e-fc3a-4859-9a12-cd740f5ee8a1","Type":"ContainerStarted","Data":"7e7fb390b8eaa8d3179397ea3c49bb3cfd44f8111a3620afd4804b37b2b6503b"} Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.902311 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2h6ps" event={"ID":"3e62a18e-fc3a-4859-9a12-cd740f5ee8a1","Type":"ContainerStarted","Data":"af152a1c0fa39b6ec51ed6a6bb61964dc9c10645d69c17c3bcd7b61b1b1dd5ad"} Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.904981 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wfsz4" event={"ID":"ac23aa18-ed6d-4ea9-b720-aa7ccb164459","Type":"ContainerStarted","Data":"7c28f4d483df4b9e209f42b0791c3dc4b4a886df284819842ead372469a2387f"} Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.912215 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" event={"ID":"b9059962-adc6-4278-aead-d07a310b9776","Type":"ContainerStarted","Data":"0e360132f9528575592219d90a1f6581252654079c386505aeb050e6c1209234"} Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.912961 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.932399 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rws8k" podStartSLOduration=119.932384229 podStartE2EDuration="1m59.932384229s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:55.930979518 +0000 UTC m=+138.145930351" watchObservedRunningTime="2025-12-13 06:47:55.932384229 +0000 UTC m=+138.147335062" Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.936187 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz"] Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.936398 4644 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-4z6jl container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" start-of-body= Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.936524 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" podUID="b9059962-adc6-4278-aead-d07a310b9776" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" Dec 13 06:47:55 crc kubenswrapper[4644]: E1213 06:47:55.945536 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:56.44552311 +0000 UTC m=+138.660473942 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:55 crc kubenswrapper[4644]: I1213 06:47:55.946082 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.010366 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-c69v8" podStartSLOduration=120.01034603 podStartE2EDuration="2m0.01034603s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:55.963071483 +0000 UTC m=+138.178022316" watchObservedRunningTime="2025-12-13 06:47:56.01034603 +0000 UTC m=+138.225296863" Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.012961 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-h4qgz"] Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.037132 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-admission-controller-857f4d67dd-wqmsp" event={"ID":"baa88468-def9-4705-8a15-a97e14eba154","Type":"ContainerStarted","Data":"6d137d87418792e96145f64b509854f502d3671e5d4e3b57daa34ba2633c1df4"} Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.047798 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:56 crc kubenswrapper[4644]: E1213 06:47:56.049185 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:56.549170807 +0000 UTC m=+138.764121640 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:56 crc kubenswrapper[4644]: W1213 06:47:56.106873 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod17cbd4ea_f855_40e4_aa42_fa46efa2a393.slice/crio-2c25b278b304695ff2c5d27a98947d50af96816fabe834c42a976ad7ff4a79d8 WatchSource:0}: Error finding container 2c25b278b304695ff2c5d27a98947d50af96816fabe834c42a976ad7ff4a79d8: Status 404 returned error can't find the container with id 2c25b278b304695ff2c5d27a98947d50af96816fabe834c42a976ad7ff4a79d8 Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.135501 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vgkx8" event={"ID":"622db6f9-2148-4569-88a6-f37650895811","Type":"ContainerStarted","Data":"3316745420fe22318ce8758c458ff7df785378f91997fc75c068e884661fbf51"} Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.135550 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vgkx8" event={"ID":"622db6f9-2148-4569-88a6-f37650895811","Type":"ContainerStarted","Data":"edb5d2ada39fd2b4126e661c78acb45091ae8926236259e6a5718f4c6dbf4584"} Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.152228 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:56 crc kubenswrapper[4644]: E1213 06:47:56.152849 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:56.652834995 +0000 UTC m=+138.867785828 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.177037 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-6xwbf" event={"ID":"fde8db32-8ec8-4f55-b401-2a796bc1d353","Type":"ContainerStarted","Data":"9b785cd68b66fcf07a28753d628e4ee5a325bca15199248e2d751ab2fba076df"} Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.186776 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" podStartSLOduration=120.186754776 podStartE2EDuration="2m0.186754776s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:56.035004344 +0000 UTC m=+138.249955178" watchObservedRunningTime="2025-12-13 06:47:56.186754776 +0000 UTC m=+138.401705610" Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.187993 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b"] Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.244251 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt" podStartSLOduration=120.244230018 podStartE2EDuration="2m0.244230018s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:56.241806902 +0000 UTC m=+138.456757725" watchObservedRunningTime="2025-12-13 06:47:56.244230018 +0000 UTC m=+138.459180852" Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.254562 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:56 crc kubenswrapper[4644]: E1213 06:47:56.256319 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:56.756286533 +0000 UTC m=+138.971237367 (durationBeforeRetry 500ms). 
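[annotation] The "Observed pod startup duration" entries clustered here all land near two minutes because every pod was created at 06:45:56 while this kubelet only began running them after its own restart (the m=+... monotonic offsets count from that restart). With no image pull recorded (firstStartedPulling/lastFinishedPulling are the zero time), podStartSLOduration equals the end-to-end duration, i.e. observedRunningTime minus podCreationTimestamp. A quick arithmetic check of the openshift-config-operator entry above (illustrative only; the logged creation timestamp is truncated to whole seconds, so the result differs from podStartSLOduration=120.244230018 only in the sub-second part):

package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, err := time.Parse(layout, "2025-12-13 06:45:56 +0000 UTC")
	if err != nil {
		panic(err)
	}
	observed, err := time.Parse(layout, "2025-12-13 06:47:56.241806902 +0000 UTC")
	if err != nil {
		panic(err)
	}
	// Prints 2m0.241806902s, i.e. ~120.24s, matching the logged SLO duration.
	fmt.Println(observed.Sub(created))
}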
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.256581 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck"
Dec 13 06:47:56 crc kubenswrapper[4644]: E1213 06:47:56.256861 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:56.756852036 +0000 UTC m=+138.971802869 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.262134 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-vfb94" event={"ID":"3f1dbe36-7205-4fac-9ce9-0194d29a6bba","Type":"ContainerStarted","Data":"fe967be89aca90814e9bcb212bf73cd9ba2784545525a9a3eb2c6d282583760c"}
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.339459 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-cvf68" event={"ID":"3efe95b6-b728-4044-b3e7-076482b73a63","Type":"ContainerStarted","Data":"32d9c4e0fcc95f79b50bb5bc23ddd5c372bdb8d9d4622c233f87ca0123e8c43a"}
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.359473 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rpj6t" podStartSLOduration=120.359452314 podStartE2EDuration="2m0.359452314s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:56.315818122 +0000 UTC m=+138.530768955" watchObservedRunningTime="2025-12-13 06:47:56.359452314 +0000 UTC m=+138.574403147"
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.362425 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 13 06:47:56 crc kubenswrapper[4644]: E1213 06:47:56.364284 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:56.864268261 +0000 UTC m=+139.079219094 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.367477 4644 patch_prober.go:28] interesting pod/downloads-7954f5f757-dtxqg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body=
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.367529 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dtxqg" podUID="cd4c2102-9555-4eb4-8d7e-91d8c7020a0e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused"
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.368211 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-lzb4z" event={"ID":"85a3c5f0-2d32-465d-9c1e-689f35f25507","Type":"ContainerStarted","Data":"93768c6354e9e71e21432a60355337a35d22fcd6a2705ad69715d3b0cda87a73"}
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.368251 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-lzb4z" event={"ID":"85a3c5f0-2d32-465d-9c1e-689f35f25507","Type":"ContainerStarted","Data":"96ebf529bc9acbe9d3b10e63f79158834864206111fafcaeb585edbe724c3b8b"}
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.382701 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-shtgr"
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.386103 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" podStartSLOduration=120.386087076 podStartE2EDuration="2m0.386087076s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:56.381000138 +0000 UTC m=+138.595950971" watchObservedRunningTime="2025-12-13 06:47:56.386087076 +0000 UTC m=+138.601037909"
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.386728 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-k88nl" podStartSLOduration=120.386721058 podStartE2EDuration="2m0.386721058s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:56.361754484 +0000 UTC m=+138.576705317" watchObservedRunningTime="2025-12-13 06:47:56.386721058 +0000 UTC m=+138.601671890"
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.415051 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-cvf68" podStartSLOduration=6.415033573 podStartE2EDuration="6.415033573s" podCreationTimestamp="2025-12-13 06:47:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:56.413892347 +0000 UTC m=+138.628843170" watchObservedRunningTime="2025-12-13 06:47:56.415033573 +0000 UTC m=+138.629984407"
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.464787 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck"
Dec 13 06:47:56 crc kubenswrapper[4644]: E1213 06:47:56.467224 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:56.967206804 +0000 UTC m=+139.182157638 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.487898 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" podStartSLOduration=120.487878722 podStartE2EDuration="2m0.487878722s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:56.485969441 +0000 UTC m=+138.700920274" watchObservedRunningTime="2025-12-13 06:47:56.487878722 +0000 UTC m=+138.702829545"
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.555334 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 13 06:47:56 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld
Dec 13 06:47:56 crc kubenswrapper[4644]: [+]process-running ok
Dec 13 06:47:56 crc kubenswrapper[4644]: healthz check failed
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.555379 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.576583 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 13 06:47:56 crc kubenswrapper[4644]: E1213 06:47:56.577367 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:57.077349596 +0000 UTC m=+139.292300429 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.578620 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-lzb4z" podStartSLOduration=120.578597383 podStartE2EDuration="2m0.578597383s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:56.526783038 +0000 UTC m=+138.741733860" watchObservedRunningTime="2025-12-13 06:47:56.578597383 +0000 UTC m=+138.793548217"
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.579021 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-wfsz4" podStartSLOduration=120.579013697 podStartE2EDuration="2m0.579013697s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:56.577566243 +0000 UTC m=+138.792517077" watchObservedRunningTime="2025-12-13 06:47:56.579013697 +0000 UTC m=+138.793964529"
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.627861 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2h6ps" podStartSLOduration=120.627839642 podStartE2EDuration="2m0.627839642s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:56.624220837 +0000 UTC m=+138.839171669" watchObservedRunningTime="2025-12-13 06:47:56.627839642 +0000 UTC m=+138.842790474"
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.651631 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vgkx8" podStartSLOduration=120.651614214 podStartE2EDuration="2m0.651614214s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:56.650789503 +0000 UTC m=+138.865740326" watchObservedRunningTime="2025-12-13 06:47:56.651614214 +0000 UTC m=+138.866565047"
Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.678239 4644 reconciler_common.go:218]
"operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:56 crc kubenswrapper[4644]: E1213 06:47:56.679020 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:57.179001801 +0000 UTC m=+139.393952634 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.742933 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pdwp4" podStartSLOduration=120.742909911 podStartE2EDuration="2m0.742909911s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:56.711529825 +0000 UTC m=+138.926480658" watchObservedRunningTime="2025-12-13 06:47:56.742909911 +0000 UTC m=+138.957860744" Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.784805 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:56 crc kubenswrapper[4644]: E1213 06:47:56.785006 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:57.284980512 +0000 UTC m=+139.499931345 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.785109 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:56 crc kubenswrapper[4644]: E1213 06:47:56.785559 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:57.285547128 +0000 UTC m=+139.500497961 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:56 crc kubenswrapper[4644]: I1213 06:47:56.895280 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:56 crc kubenswrapper[4644]: E1213 06:47:56.896144 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:57.396127062 +0000 UTC m=+139.611077895 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.002522 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:57 crc kubenswrapper[4644]: E1213 06:47:57.002956 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:57.502943299 +0000 UTC m=+139.717894132 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.104561 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:57 crc kubenswrapper[4644]: E1213 06:47:57.105427 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:57.605408531 +0000 UTC m=+139.820359364 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.208727 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:57 crc kubenswrapper[4644]: E1213 06:47:57.209128 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:57.70911562 +0000 UTC m=+139.924066453 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.310826 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:57 crc kubenswrapper[4644]: E1213 06:47:57.311599 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:57.811581214 +0000 UTC m=+140.026532047 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.412405 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:57 crc kubenswrapper[4644]: E1213 06:47:57.413046 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:57.913034244 +0000 UTC m=+140.127985077 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.430166 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-h4qgz" event={"ID":"c2506a59-7a09-4c47-aead-bdb7aaec207f","Type":"ContainerStarted","Data":"13143d03a7c36131ef7aa44846d5696eff56a80dfa16c23a5eeb74436f3cd916"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.431223 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-h4qgz" event={"ID":"c2506a59-7a09-4c47-aead-bdb7aaec207f","Type":"ContainerStarted","Data":"d7201dffa82300acf8b20a4a61eb10c354b5e07bf57793e0bb3dae58b9fa4507"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.433031 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" event={"ID":"f8aab518-1735-49f5-98af-68d01e096132","Type":"ContainerStarted","Data":"556da2544417e0c0d8dc849dbaf7bf0d3727f9f9fb668868c86ebbe5af570a7c"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.438554 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" event={"ID":"5de2f44b-564a-461c-b9e1-b4b306d8ecb1","Type":"ContainerStarted","Data":"15fa61a9e529b0e0eca5efafb4d687d9e4b08ddb595b16a2abdea0c61eff7963"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.439316 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.440721 4644 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-crmqj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/healthz\": dial tcp 10.217.0.32:8080: 
connect: connection refused" start-of-body= Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.440817 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.32:8080/healthz\": dial tcp 10.217.0.32:8080: connect: connection refused" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.449490 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" event={"ID":"8b5eab9d-beb2-4574-bd14-831322b154d4","Type":"ContainerStarted","Data":"c279b61b68e4b39aabbc89b99f1b9af34b142151bafa28e3dccd87e4268108d3"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.449631 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" event={"ID":"8b5eab9d-beb2-4574-bd14-831322b154d4","Type":"ContainerStarted","Data":"279f063467737d5078ff91243fdebd80079f40916f8d25246ce21fbea345cc9a"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.450644 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.453361 4644 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-h6zvh container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" start-of-body= Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.453485 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" podUID="8b5eab9d-beb2-4574-bd14-831322b154d4" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.462467 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-h4qgz" podStartSLOduration=6.46245602 podStartE2EDuration="6.46245602s" podCreationTimestamp="2025-12-13 06:47:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:57.461678447 +0000 UTC m=+139.676629280" watchObservedRunningTime="2025-12-13 06:47:57.46245602 +0000 UTC m=+139.677406853" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.466819 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" event={"ID":"8eafd0da-eec5-48e4-9ff3-0e8367d338c2","Type":"ContainerStarted","Data":"f2b7f6d26992aac369addeff3445aaeed965c500bc7afdd6f019493f65973e69"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.481004 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wfsz4" event={"ID":"ac23aa18-ed6d-4ea9-b720-aa7ccb164459","Type":"ContainerStarted","Data":"e7f2007f498537b84451941a1b217de201026e52e67797c93083af130ea34ead"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.493627 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-cvf68" 
event={"ID":"3efe95b6-b728-4044-b3e7-076482b73a63","Type":"ContainerStarted","Data":"ae40556bccdf6bee5b2be03ea3b40aa1b6c1d26fe15c492f8a801dbe081ae20f"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.496193 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" podStartSLOduration=121.496180294 podStartE2EDuration="2m1.496180294s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:57.494532806 +0000 UTC m=+139.709483639" watchObservedRunningTime="2025-12-13 06:47:57.496180294 +0000 UTC m=+139.711131128" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.513662 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" podStartSLOduration=121.51364497 podStartE2EDuration="2m1.51364497s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:57.511735348 +0000 UTC m=+139.726686181" watchObservedRunningTime="2025-12-13 06:47:57.51364497 +0000 UTC m=+139.728595803" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.514889 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:57 crc kubenswrapper[4644]: E1213 06:47:57.515275 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:58.015256039 +0000 UTC m=+140.230206872 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.515580 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:57 crc kubenswrapper[4644]: E1213 06:47:57.515946 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:58.01593691 +0000 UTC m=+140.230887744 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.521664 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9nmb7" event={"ID":"efc4a9ab-d47c-4425-aecf-746386c5ee97","Type":"ContainerStarted","Data":"1f280a6c9d7ea4e0a7f747105df5a18b71ce16316dbd92486b69736aae0b6b52"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.521816 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9nmb7" event={"ID":"efc4a9ab-d47c-4425-aecf-746386c5ee97","Type":"ContainerStarted","Data":"40913ff977d8700e37bf4c01f237c52c98ed13ae59dcec275e92c8ad2138c15d"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.556605 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 06:47:57 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld Dec 13 06:47:57 crc kubenswrapper[4644]: [+]process-running ok Dec 13 06:47:57 crc kubenswrapper[4644]: healthz check failed Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.556995 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.557516 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6kp77" event={"ID":"17cbd4ea-f855-40e4-aa42-fa46efa2a393","Type":"ContainerStarted","Data":"a78da6ca850a9aed1a21534c0b2cf0d98bdf045bc7515910b74e7e44f992286d"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.557615 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6kp77" event={"ID":"17cbd4ea-f855-40e4-aa42-fa46efa2a393","Type":"ContainerStarted","Data":"b11142409b514441c7cb421c080d348ae5f034545209dd40121fe338f45accc7"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.557674 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6kp77" event={"ID":"17cbd4ea-f855-40e4-aa42-fa46efa2a393","Type":"ContainerStarted","Data":"2c25b278b304695ff2c5d27a98947d50af96816fabe834c42a976ad7ff4a79d8"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.557902 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6kp77" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.569239 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" 
event={"ID":"a316aeb4-d464-4fc3-b4b7-7df5e00e68be","Type":"ContainerStarted","Data":"719aacbaa61cf44187da56f5b1d6ad68e62b7ebc92e58d560f61e8a77a837571"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.574282 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-d7jk5" event={"ID":"773a896a-a43c-4a09-a6e7-42063bf34606","Type":"ContainerStarted","Data":"ba619ba9811931e84d9fe48b35d69c495b9853cbde36da5d5e4e4fb7b3dfdbf3"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.577778 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" podStartSLOduration=121.577764437 podStartE2EDuration="2m1.577764437s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:57.543106096 +0000 UTC m=+139.758056929" watchObservedRunningTime="2025-12-13 06:47:57.577764437 +0000 UTC m=+139.792715270" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.590663 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" event={"ID":"16dd8123-4c76-4fa6-8792-d1fe4a68b782","Type":"ContainerStarted","Data":"11550f068c1499082819d2728375e1dc678eb6e2b912242b792592e11d6a2c9f"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.603409 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-d7jk5" podStartSLOduration=121.603393687 podStartE2EDuration="2m1.603393687s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:57.6032781 +0000 UTC m=+139.818228933" watchObservedRunningTime="2025-12-13 06:47:57.603393687 +0000 UTC m=+139.818344520" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.604646 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9nmb7" podStartSLOduration=121.604635082 podStartE2EDuration="2m1.604635082s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:57.578229682 +0000 UTC m=+139.793180515" watchObservedRunningTime="2025-12-13 06:47:57.604635082 +0000 UTC m=+139.819585915" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.608197 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" event={"ID":"3e906e3d-d501-43fd-baae-8c5b606c7c58","Type":"ContainerStarted","Data":"933f317abf3562e1916f3bde5f909aeb17db157613e3341059d54c436e41f09e"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.616320 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:57 crc kubenswrapper[4644]: E1213 06:47:57.616599 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:58.116565609 +0000 UTC m=+140.331516443 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.616859 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:57 crc kubenswrapper[4644]: E1213 06:47:57.617323 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:58.117312214 +0000 UTC m=+140.332263047 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.627770 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-hn5l6" event={"ID":"cf016b2d-7155-49d8-9675-567dc1cb1dfe","Type":"ContainerStarted","Data":"930d021effc417d92d3f058e18bbf65bd47b6af1832b59d536dd92f4946b630f"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.628384 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-hn5l6" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.628613 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-hn5l6" event={"ID":"cf016b2d-7155-49d8-9675-567dc1cb1dfe","Type":"ContainerStarted","Data":"99446128e9274b6e3ecf9f1f0b51ca8d6841eadd8497df9d07e01827e203553b"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.637095 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-l4cnx" event={"ID":"a8b48e2c-cb80-4c41-bd8b-8a57190f5cca","Type":"ContainerStarted","Data":"c60ad138cb063d9ee675f233c0cbb2aae0ff60d37d29ca29b7faab9718b66caf"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.639646 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" podStartSLOduration=121.63963246 podStartE2EDuration="2m1.63963246s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:57.639334831 +0000 UTC m=+139.854285664" watchObservedRunningTime="2025-12-13 
06:47:57.63963246 +0000 UTC m=+139.854583294" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.657629 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgprm" event={"ID":"460ee6fa-709e-4281-b1a7-3ca5f795d684","Type":"ContainerStarted","Data":"79e02abee054f8f84bb9c2f361937569527ae5fb9ae32d7d58f962b952d65c3c"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.659737 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b" event={"ID":"48b60af5-ee30-4605-8cbf-01c379981b3e","Type":"ContainerStarted","Data":"871b449276143a28faaf6845356d4e93cf695d2e025842548b2dfb2aa54b0f76"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.659772 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b" event={"ID":"48b60af5-ee30-4605-8cbf-01c379981b3e","Type":"ContainerStarted","Data":"6ca647a84670271d16a62402b3164b5a5bff67bd2f3c728d55e42d71d079899c"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.660787 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.662572 4644 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-kvg6b container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.662609 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b" podUID="48b60af5-ee30-4605-8cbf-01c379981b3e" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.665638 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6kp77" podStartSLOduration=121.665625365 podStartE2EDuration="2m1.665625365s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:57.662350827 +0000 UTC m=+139.877301660" watchObservedRunningTime="2025-12-13 06:47:57.665625365 +0000 UTC m=+139.880576188" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.668622 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdd25" event={"ID":"358d9360-7d08-47de-b8f5-81205f934c81","Type":"ContainerStarted","Data":"35e96bfb9fd20d624e1809a0a9cf9a8c5fd15bbabe09773a2c6702b2f811606e"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.668654 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdd25" event={"ID":"358d9360-7d08-47de-b8f5-81205f934c81","Type":"ContainerStarted","Data":"7111bab2fc7772e9331bf2d76ac3f38559887d2550b16afce422f5614ece4a75"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.683809 4644 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb" event={"ID":"09933a3a-4eb6-466f-af47-f3d4f6bb2709","Type":"ContainerStarted","Data":"17c367d89c3e63fb6d5a1bfb887c230cbcea746f36469f6f9cc9d7c38cf79e5b"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.683866 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb" event={"ID":"09933a3a-4eb6-466f-af47-f3d4f6bb2709","Type":"ContainerStarted","Data":"8d8afcf621092f598ecdcb4d0bbe697b55e80576f99d354b990b2e1e288b2cb5"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.683877 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb" event={"ID":"09933a3a-4eb6-466f-af47-f3d4f6bb2709","Type":"ContainerStarted","Data":"8c4a5234af0f703c5b31cb447e59105e9d0467fc30094cfb5a3980c4089ada3c"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.693126 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mrjxs" event={"ID":"29418836-a9b6-42c2-90b1-755ff73fe3fa","Type":"ContainerStarted","Data":"7022a86a21bfff790c2a7b3c8a1c56726d80487367ad227d637752d317b47ebf"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.696004 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" event={"ID":"b9059962-adc6-4278-aead-d07a310b9776","Type":"ContainerStarted","Data":"d620f3ce6df6c6845844b641ef12589c0a90ca0dc710ed9228d92470c1bfdff6"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.707142 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.717695 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz" event={"ID":"e05235cb-84ad-45ae-9fd3-02803c0fd752","Type":"ContainerStarted","Data":"a12147d912934ba9a62d1bf11b029373174c24c170c88b7efe7f6c4202da39ff"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.717734 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz" event={"ID":"e05235cb-84ad-45ae-9fd3-02803c0fd752","Type":"ContainerStarted","Data":"a1a50078146068b12fdcd25a3d2ad2ab23e2b769adcae1548f236c6c6d84b35b"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.717746 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz" event={"ID":"e05235cb-84ad-45ae-9fd3-02803c0fd752","Type":"ContainerStarted","Data":"d451985b3e328abe82abbac38a5243d6892ffde6044849ff6da3cfdec9093782"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.721459 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-wqmsp" event={"ID":"baa88468-def9-4705-8a15-a97e14eba154","Type":"ContainerStarted","Data":"f02ddd330978bd33977e1bb4b20acdbb44b867ce303464e7e87a8e3733f4e56f"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.721503 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-wqmsp" 
event={"ID":"baa88468-def9-4705-8a15-a97e14eba154","Type":"ContainerStarted","Data":"7ba796231208e364226072bf5b2c7060d43c020a17511130f32f78d39b3887fa"} Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.721971 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:57 crc kubenswrapper[4644]: E1213 06:47:57.722904 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:58.222888648 +0000 UTC m=+140.437839482 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.735139 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.739136 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-l4cnx" podStartSLOduration=121.739120956 podStartE2EDuration="2m1.739120956s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:57.738494748 +0000 UTC m=+139.953445581" watchObservedRunningTime="2025-12-13 06:47:57.739120956 +0000 UTC m=+139.954071789" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.739771 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgprm" podStartSLOduration=121.739764055 podStartE2EDuration="2m1.739764055s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:57.684830343 +0000 UTC m=+139.899781176" watchObservedRunningTime="2025-12-13 06:47:57.739764055 +0000 UTC m=+139.954714889" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.741646 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qbqgt" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.762870 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-m24dn" podStartSLOduration=121.762851265 podStartE2EDuration="2m1.762851265s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:57.760779819 +0000 UTC m=+139.975730662" 
watchObservedRunningTime="2025-12-13 06:47:57.762851265 +0000 UTC m=+139.977802097" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.812272 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-hn5l6" podStartSLOduration=7.8122486349999996 podStartE2EDuration="7.812248635s" podCreationTimestamp="2025-12-13 06:47:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:57.803863275 +0000 UTC m=+140.018814107" watchObservedRunningTime="2025-12-13 06:47:57.812248635 +0000 UTC m=+140.027199468" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.826668 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:57 crc kubenswrapper[4644]: E1213 06:47:57.835037 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:58.335017786 +0000 UTC m=+140.549968618 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.847321 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" podStartSLOduration=121.847284296 podStartE2EDuration="2m1.847284296s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:57.845910552 +0000 UTC m=+140.060861385" watchObservedRunningTime="2025-12-13 06:47:57.847284296 +0000 UTC m=+140.062235128" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.883670 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wdd25" podStartSLOduration=121.88363535 podStartE2EDuration="2m1.88363535s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:57.883091206 +0000 UTC m=+140.098042040" watchObservedRunningTime="2025-12-13 06:47:57.88363535 +0000 UTC m=+140.098586183" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.887862 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b" podStartSLOduration=121.8878471 podStartE2EDuration="2m1.8878471s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:57.862134424 +0000 UTC m=+140.077085257" watchObservedRunningTime="2025-12-13 06:47:57.8878471 +0000 UTC m=+140.102797933" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.911496 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gftmb" podStartSLOduration=121.911429591 podStartE2EDuration="2m1.911429591s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:57.910902731 +0000 UTC m=+140.125853564" watchObservedRunningTime="2025-12-13 06:47:57.911429591 +0000 UTC m=+140.126380424" Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.928222 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:57 crc kubenswrapper[4644]: E1213 06:47:57.928752 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:58.428736139 +0000 UTC m=+140.643686973 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:57 crc kubenswrapper[4644]: I1213 06:47:57.991893 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mrjxs" podStartSLOduration=121.991872779 podStartE2EDuration="2m1.991872779s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:57.985687125 +0000 UTC m=+140.200637958" watchObservedRunningTime="2025-12-13 06:47:57.991872779 +0000 UTC m=+140.206823611" Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.034288 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:58 crc kubenswrapper[4644]: E1213 06:47:58.034803 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:58.534791585 +0000 UTC m=+140.749742418 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.057883 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.057944 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.081010 4644 patch_prober.go:28] interesting pod/apiserver-76f77b778f-8ktwh container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 13 06:47:58 crc kubenswrapper[4644]: [+]log ok Dec 13 06:47:58 crc kubenswrapper[4644]: [+]etcd ok Dec 13 06:47:58 crc kubenswrapper[4644]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 13 06:47:58 crc kubenswrapper[4644]: [+]poststarthook/generic-apiserver-start-informers ok Dec 13 06:47:58 crc kubenswrapper[4644]: [+]poststarthook/max-in-flight-filter ok Dec 13 06:47:58 crc kubenswrapper[4644]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 13 06:47:58 crc kubenswrapper[4644]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 13 06:47:58 crc kubenswrapper[4644]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 13 06:47:58 crc kubenswrapper[4644]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 13 06:47:58 crc kubenswrapper[4644]: [+]poststarthook/project.openshift.io-projectcache ok Dec 13 06:47:58 crc kubenswrapper[4644]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 13 06:47:58 crc kubenswrapper[4644]: [+]poststarthook/openshift.io-startinformers ok Dec 13 06:47:58 crc kubenswrapper[4644]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 13 06:47:58 crc kubenswrapper[4644]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 13 06:47:58 crc kubenswrapper[4644]: livez check failed Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.081336 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" podUID="3e906e3d-d501-43fd-baae-8c5b606c7c58" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.135706 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:58 crc kubenswrapper[4644]: E1213 06:47:58.136247 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-13 06:47:58.636231871 +0000 UTC m=+140.851182704 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.151190 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-wqmsp" podStartSLOduration=122.151172349 podStartE2EDuration="2m2.151172349s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:58.148628866 +0000 UTC m=+140.363579698" watchObservedRunningTime="2025-12-13 06:47:58.151172349 +0000 UTC m=+140.366123182" Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.179316 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.182740 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.237595 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:58 crc kubenswrapper[4644]: E1213 06:47:58.238040 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:58.738026733 +0000 UTC m=+140.952977566 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.339383 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:58 crc kubenswrapper[4644]: E1213 06:47:58.339881 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-13 06:47:58.839867041 +0000 UTC m=+141.054817873 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.443234 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:58 crc kubenswrapper[4644]: E1213 06:47:58.443627 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:58.943610108 +0000 UTC m=+141.158560941 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.544681 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:58 crc kubenswrapper[4644]: E1213 06:47:58.545238 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:59.045223268 +0000 UTC m=+141.260174101 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.550969 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 13 06:47:58 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld
Dec 13 06:47:58 crc kubenswrapper[4644]: [+]process-running ok
Dec 13 06:47:58 crc kubenswrapper[4644]: healthz check failed
Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.551029 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.646561 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck"
Dec 13 06:47:58 crc kubenswrapper[4644]: E1213 06:47:58.646848 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:59.146836378 +0000 UTC m=+141.361787212 (durationBeforeRetry 500ms).
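
The "[+]" and "[-]" lines in the startup-probe output above are the aggregated health-check format used by Kubernetes-style apiservers: each named check reports ok or failed, failure details are hidden from unauthorized callers ("reason withheld"), and any failing check turns the endpoint into an HTTP 500, which is exactly what the kubelet prober records. The following is a minimal, illustrative Go sketch of a handler emitting that format; it is modeled on the style of k8s.io/apiserver's healthz endpoint, and the check names are invented placeholders, not the real apiserver hooks.

package main

import (
	"fmt"
	"log"
	"net/http"
)

// check is a named health check with a precomputed result; real apiservers
// run the checks on each request instead of storing a boolean.
type check struct {
	name string
	ok   bool
}

// healthzHandler renders one "[+]<name> ok" or "[-]<name> failed: reason
// withheld" line per check, then a summary line; any failure yields HTTP 500.
func healthzHandler(checks []check) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		body, failed := "", false
		for _, c := range checks {
			if c.ok {
				body += fmt.Sprintf("[+]%s ok\n", c.name)
			} else {
				// Real apiservers withhold failure details from callers not
				// authorized to see them, hence "reason withheld".
				body += fmt.Sprintf("[-]%s failed: reason withheld\n", c.name)
				failed = true
			}
		}
		if failed {
			http.Error(w, body+"healthz check failed", http.StatusInternalServerError)
			return
		}
		fmt.Fprint(w, body+"ok")
	}
}

func main() {
	http.HandleFunc("/healthz", healthzHandler([]check{
		{name: "ping", ok: true},
		{name: "backend-http", ok: false}, // placeholder failing check
	}))
	log.Fatal(http.ListenAndServe(":8080", nil))
}

A probe against this handler would report "HTTP probe failed with statuscode: 500" with the check list as the start-of-body, matching the shape of the router and apiserver probe entries in this log.
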
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.693119 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.726378 4644 generic.go:334] "Generic (PLEG): container finished" podID="f8aab518-1735-49f5-98af-68d01e096132" containerID="556da2544417e0c0d8dc849dbaf7bf0d3727f9f9fb668868c86ebbe5af570a7c" exitCode=0 Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.726454 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" event={"ID":"f8aab518-1735-49f5-98af-68d01e096132","Type":"ContainerDied","Data":"556da2544417e0c0d8dc849dbaf7bf0d3727f9f9fb668868c86ebbe5af570a7c"} Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.728886 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" event={"ID":"8eafd0da-eec5-48e4-9ff3-0e8367d338c2","Type":"ContainerStarted","Data":"94637d2a299ade980fce7a84df96e39c2c764d91062094c5217cd9c092cf281b"} Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.728934 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" event={"ID":"8eafd0da-eec5-48e4-9ff3-0e8367d338c2","Type":"ContainerStarted","Data":"5594c351bdd70561399c3e5380bb32d4bd604e0c615f656cf737e2a048b49194"} Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.728953 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" event={"ID":"8eafd0da-eec5-48e4-9ff3-0e8367d338c2","Type":"ContainerStarted","Data":"7de65a88a5608e395f224bd1bb76ef9ae21d4bf213a64a57c0c2fc3f6453d242"} Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.730610 4644 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-crmqj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/healthz\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.730661 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.32:8080/healthz\": dial tcp 10.217.0.32:8080: connect: connection refused" Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.733566 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kvg6b" Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.734169 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-j5lrz" podStartSLOduration=122.73415675 podStartE2EDuration="2m2.73415675s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:58.182732262 +0000 UTC m=+140.397683095" watchObservedRunningTime="2025-12-13 06:47:58.73415675 +0000 UTC m=+140.949107583" Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.742266 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-tbrk2" Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.747801 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:58 crc kubenswrapper[4644]: E1213 06:47:58.747949 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:59.247928711 +0000 UTC m=+141.462879543 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.748343 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:58 crc kubenswrapper[4644]: E1213 06:47:58.748700 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:59.248687077 +0000 UTC m=+141.463637910 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.793479 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-hmhwd" podStartSLOduration=8.793456974 podStartE2EDuration="8.793456974s" podCreationTimestamp="2025-12-13 06:47:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:47:58.792276644 +0000 UTC m=+141.007227477" watchObservedRunningTime="2025-12-13 06:47:58.793456974 +0000 UTC m=+141.008407797" Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.849224 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:58 crc kubenswrapper[4644]: E1213 06:47:58.849624 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:59.349544025 +0000 UTC m=+141.564494858 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.852326 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:58 crc kubenswrapper[4644]: E1213 06:47:58.854235 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:59.354218477 +0000 UTC m=+141.569169300 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.870885 4644 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.957942 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:58 crc kubenswrapper[4644]: E1213 06:47:58.958116 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:59.458084314 +0000 UTC m=+141.673035148 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:58 crc kubenswrapper[4644]: I1213 06:47:58.958402 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:58 crc kubenswrapper[4644]: E1213 06:47:58.958729 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:59.45872015 +0000 UTC m=+141.673670983 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.059925 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:59 crc kubenswrapper[4644]: E1213 06:47:59.060266 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:59.560239334 +0000 UTC m=+141.775190167 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.162088 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:59 crc kubenswrapper[4644]: E1213 06:47:59.162622 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:59.662603487 +0000 UTC m=+141.877554320 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.263595 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:59 crc kubenswrapper[4644]: E1213 06:47:59.263799 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:59.763770789 +0000 UTC m=+141.978721622 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.264248 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:59 crc kubenswrapper[4644]: E1213 06:47:59.264653 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:59.764641847 +0000 UTC m=+141.979592681 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.365675 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:59 crc kubenswrapper[4644]: E1213 06:47:59.365865 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:59.865836662 +0000 UTC m=+142.080787495 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.366110 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:59 crc kubenswrapper[4644]: E1213 06:47:59.366457 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:59.866432412 +0000 UTC m=+142.081383246 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.467655 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:59 crc kubenswrapper[4644]: E1213 06:47:59.467955 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 06:47:59.967923383 +0000 UTC m=+142.182874216 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.468210 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:59 crc kubenswrapper[4644]: E1213 06:47:59.468651 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 06:47:59.968639691 +0000 UTC m=+142.183590523 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-2rfck" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.507022 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-h6zvh" Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.529772 4644 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-13T06:47:58.870911281Z","Handler":null,"Name":""} Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.532284 4644 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.532435 4644 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.554607 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 06:47:59 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld Dec 13 06:47:59 crc kubenswrapper[4644]: [+]process-running ok Dec 13 06:47:59 crc kubenswrapper[4644]: healthz check failed Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.554952 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.569118 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.581332 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.673539 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.676315 4644 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.676368 4644 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.699821 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-2rfck\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.738029 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.941120 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.976572 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2bmvc"] Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.977420 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2bmvc" Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.980111 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 13 06:47:59 crc kubenswrapper[4644]: I1213 06:47:59.990970 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2bmvc"] Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.031690 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.079348 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b355a1b-28e0-462c-a1ef-43eea6341565-utilities\") pod \"community-operators-2bmvc\" (UID: \"4b355a1b-28e0-462c-a1ef-43eea6341565\") " pod="openshift-marketplace/community-operators-2bmvc" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.079621 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b355a1b-28e0-462c-a1ef-43eea6341565-catalog-content\") pod \"community-operators-2bmvc\" (UID: \"4b355a1b-28e0-462c-a1ef-43eea6341565\") " pod="openshift-marketplace/community-operators-2bmvc" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.079653 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5cjq\" (UniqueName: \"kubernetes.io/projected/4b355a1b-28e0-462c-a1ef-43eea6341565-kube-api-access-x5cjq\") pod \"community-operators-2bmvc\" (UID: \"4b355a1b-28e0-462c-a1ef-43eea6341565\") " pod="openshift-marketplace/community-operators-2bmvc" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.175764 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2rfck"] Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.177342 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-54mm5"] Dec 13 06:48:00 crc kubenswrapper[4644]: E1213 06:48:00.177550 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8aab518-1735-49f5-98af-68d01e096132" containerName="collect-profiles" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.177570 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8aab518-1735-49f5-98af-68d01e096132" containerName="collect-profiles" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.177683 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8aab518-1735-49f5-98af-68d01e096132" containerName="collect-profiles" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.178290 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-54mm5" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.180272 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f8aab518-1735-49f5-98af-68d01e096132-secret-volume\") pod \"f8aab518-1735-49f5-98af-68d01e096132\" (UID: \"f8aab518-1735-49f5-98af-68d01e096132\") " Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.181428 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f8aab518-1735-49f5-98af-68d01e096132-config-volume\") pod \"f8aab518-1735-49f5-98af-68d01e096132\" (UID: \"f8aab518-1735-49f5-98af-68d01e096132\") " Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.181526 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.181581 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lsfjs\" (UniqueName: \"kubernetes.io/projected/f8aab518-1735-49f5-98af-68d01e096132-kube-api-access-lsfjs\") pod \"f8aab518-1735-49f5-98af-68d01e096132\" (UID: \"f8aab518-1735-49f5-98af-68d01e096132\") " Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.182078 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8aab518-1735-49f5-98af-68d01e096132-config-volume" (OuterVolumeSpecName: "config-volume") pod "f8aab518-1735-49f5-98af-68d01e096132" (UID: "f8aab518-1735-49f5-98af-68d01e096132"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.182626 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b355a1b-28e0-462c-a1ef-43eea6341565-catalog-content\") pod \"community-operators-2bmvc\" (UID: \"4b355a1b-28e0-462c-a1ef-43eea6341565\") " pod="openshift-marketplace/community-operators-2bmvc" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.182657 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5cjq\" (UniqueName: \"kubernetes.io/projected/4b355a1b-28e0-462c-a1ef-43eea6341565-kube-api-access-x5cjq\") pod \"community-operators-2bmvc\" (UID: \"4b355a1b-28e0-462c-a1ef-43eea6341565\") " pod="openshift-marketplace/community-operators-2bmvc" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.182739 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b355a1b-28e0-462c-a1ef-43eea6341565-utilities\") pod \"community-operators-2bmvc\" (UID: \"4b355a1b-28e0-462c-a1ef-43eea6341565\") " pod="openshift-marketplace/community-operators-2bmvc" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.182855 4644 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f8aab518-1735-49f5-98af-68d01e096132-config-volume\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.183378 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b355a1b-28e0-462c-a1ef-43eea6341565-utilities\") pod \"community-operators-2bmvc\" (UID: 
\"4b355a1b-28e0-462c-a1ef-43eea6341565\") " pod="openshift-marketplace/community-operators-2bmvc" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.183972 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b355a1b-28e0-462c-a1ef-43eea6341565-catalog-content\") pod \"community-operators-2bmvc\" (UID: \"4b355a1b-28e0-462c-a1ef-43eea6341565\") " pod="openshift-marketplace/community-operators-2bmvc" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.185953 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8aab518-1735-49f5-98af-68d01e096132-kube-api-access-lsfjs" (OuterVolumeSpecName: "kube-api-access-lsfjs") pod "f8aab518-1735-49f5-98af-68d01e096132" (UID: "f8aab518-1735-49f5-98af-68d01e096132"). InnerVolumeSpecName "kube-api-access-lsfjs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.186170 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8aab518-1735-49f5-98af-68d01e096132-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f8aab518-1735-49f5-98af-68d01e096132" (UID: "f8aab518-1735-49f5-98af-68d01e096132"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.200752 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-54mm5"] Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.201830 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5cjq\" (UniqueName: \"kubernetes.io/projected/4b355a1b-28e0-462c-a1ef-43eea6341565-kube-api-access-x5cjq\") pod \"community-operators-2bmvc\" (UID: \"4b355a1b-28e0-462c-a1ef-43eea6341565\") " pod="openshift-marketplace/community-operators-2bmvc" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.283604 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10291551-2baf-4271-bc49-6a40e5ceb94b-catalog-content\") pod \"certified-operators-54mm5\" (UID: \"10291551-2baf-4271-bc49-6a40e5ceb94b\") " pod="openshift-marketplace/certified-operators-54mm5" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.283668 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10291551-2baf-4271-bc49-6a40e5ceb94b-utilities\") pod \"certified-operators-54mm5\" (UID: \"10291551-2baf-4271-bc49-6a40e5ceb94b\") " pod="openshift-marketplace/certified-operators-54mm5" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.284311 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxm94\" (UniqueName: \"kubernetes.io/projected/10291551-2baf-4271-bc49-6a40e5ceb94b-kube-api-access-hxm94\") pod \"certified-operators-54mm5\" (UID: \"10291551-2baf-4271-bc49-6a40e5ceb94b\") " pod="openshift-marketplace/certified-operators-54mm5" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.284485 4644 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f8aab518-1735-49f5-98af-68d01e096132-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.284507 4644 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lsfjs\" (UniqueName: \"kubernetes.io/projected/f8aab518-1735-49f5-98af-68d01e096132-kube-api-access-lsfjs\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.299093 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2bmvc" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.382230 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-g2gkk"] Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.385882 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10291551-2baf-4271-bc49-6a40e5ceb94b-catalog-content\") pod \"certified-operators-54mm5\" (UID: \"10291551-2baf-4271-bc49-6a40e5ceb94b\") " pod="openshift-marketplace/certified-operators-54mm5" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.385936 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10291551-2baf-4271-bc49-6a40e5ceb94b-utilities\") pod \"certified-operators-54mm5\" (UID: \"10291551-2baf-4271-bc49-6a40e5ceb94b\") " pod="openshift-marketplace/certified-operators-54mm5" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.386093 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxm94\" (UniqueName: \"kubernetes.io/projected/10291551-2baf-4271-bc49-6a40e5ceb94b-kube-api-access-hxm94\") pod \"certified-operators-54mm5\" (UID: \"10291551-2baf-4271-bc49-6a40e5ceb94b\") " pod="openshift-marketplace/certified-operators-54mm5" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.387011 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10291551-2baf-4271-bc49-6a40e5ceb94b-catalog-content\") pod \"certified-operators-54mm5\" (UID: \"10291551-2baf-4271-bc49-6a40e5ceb94b\") " pod="openshift-marketplace/certified-operators-54mm5" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.387247 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10291551-2baf-4271-bc49-6a40e5ceb94b-utilities\") pod \"certified-operators-54mm5\" (UID: \"10291551-2baf-4271-bc49-6a40e5ceb94b\") " pod="openshift-marketplace/certified-operators-54mm5" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.388140 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g2gkk" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.403210 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.404436 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g2gkk"] Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.415777 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxm94\" (UniqueName: \"kubernetes.io/projected/10291551-2baf-4271-bc49-6a40e5ceb94b-kube-api-access-hxm94\") pod \"certified-operators-54mm5\" (UID: \"10291551-2baf-4271-bc49-6a40e5ceb94b\") " pod="openshift-marketplace/certified-operators-54mm5" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.465623 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2bmvc"] Dec 13 06:48:00 crc kubenswrapper[4644]: W1213 06:48:00.470266 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b355a1b_28e0_462c_a1ef_43eea6341565.slice/crio-3f13444f58d076d4fa403284d876796fd46ed75cedf126c2008cc73f53a7a35f WatchSource:0}: Error finding container 3f13444f58d076d4fa403284d876796fd46ed75cedf126c2008cc73f53a7a35f: Status 404 returned error can't find the container with id 3f13444f58d076d4fa403284d876796fd46ed75cedf126c2008cc73f53a7a35f Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.488031 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvvgd\" (UniqueName: \"kubernetes.io/projected/a642cd12-49f1-4784-86ce-05f0291c9049-kube-api-access-kvvgd\") pod \"community-operators-g2gkk\" (UID: \"a642cd12-49f1-4784-86ce-05f0291c9049\") " pod="openshift-marketplace/community-operators-g2gkk" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.488114 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a642cd12-49f1-4784-86ce-05f0291c9049-utilities\") pod \"community-operators-g2gkk\" (UID: \"a642cd12-49f1-4784-86ce-05f0291c9049\") " pod="openshift-marketplace/community-operators-g2gkk" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.488198 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a642cd12-49f1-4784-86ce-05f0291c9049-catalog-content\") pod \"community-operators-g2gkk\" (UID: \"a642cd12-49f1-4784-86ce-05f0291c9049\") " pod="openshift-marketplace/community-operators-g2gkk" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.505701 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-54mm5"
Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.551431 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 13 06:48:00 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld
Dec 13 06:48:00 crc kubenswrapper[4644]: [+]process-running ok
Dec 13 06:48:00 crc kubenswrapper[4644]: healthz check failed
Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.551551 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.579036 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-szmwp"]
Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.580064 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-szmwp"
Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.590652 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvvgd\" (UniqueName: \"kubernetes.io/projected/a642cd12-49f1-4784-86ce-05f0291c9049-kube-api-access-kvvgd\") pod \"community-operators-g2gkk\" (UID: \"a642cd12-49f1-4784-86ce-05f0291c9049\") " pod="openshift-marketplace/community-operators-g2gkk"
Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.590708 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a642cd12-49f1-4784-86ce-05f0291c9049-utilities\") pod \"community-operators-g2gkk\" (UID: \"a642cd12-49f1-4784-86ce-05f0291c9049\") " pod="openshift-marketplace/community-operators-g2gkk"
Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.590754 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a642cd12-49f1-4784-86ce-05f0291c9049-catalog-content\") pod \"community-operators-g2gkk\" (UID: \"a642cd12-49f1-4784-86ce-05f0291c9049\") " pod="openshift-marketplace/community-operators-g2gkk"
Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.591216 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a642cd12-49f1-4784-86ce-05f0291c9049-catalog-content\") pod \"community-operators-g2gkk\" (UID: \"a642cd12-49f1-4784-86ce-05f0291c9049\") " pod="openshift-marketplace/community-operators-g2gkk"
Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.591712 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a642cd12-49f1-4784-86ce-05f0291c9049-utilities\") pod \"community-operators-g2gkk\" (UID: \"a642cd12-49f1-4784-86ce-05f0291c9049\") " pod="openshift-marketplace/community-operators-g2gkk"
Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.595967 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-szmwp"]
Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.610893 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume
\"kube-api-access-kvvgd\" (UniqueName: \"kubernetes.io/projected/a642cd12-49f1-4784-86ce-05f0291c9049-kube-api-access-kvvgd\") pod \"community-operators-g2gkk\" (UID: \"a642cd12-49f1-4784-86ce-05f0291c9049\") " pod="openshift-marketplace/community-operators-g2gkk" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.687085 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-54mm5"] Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.692243 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-catalog-content\") pod \"certified-operators-szmwp\" (UID: \"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb\") " pod="openshift-marketplace/certified-operators-szmwp" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.692330 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lfwl\" (UniqueName: \"kubernetes.io/projected/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-kube-api-access-5lfwl\") pod \"certified-operators-szmwp\" (UID: \"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb\") " pod="openshift-marketplace/certified-operators-szmwp" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.692373 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-utilities\") pod \"certified-operators-szmwp\" (UID: \"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb\") " pod="openshift-marketplace/certified-operators-szmwp" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.704040 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g2gkk" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.743258 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" event={"ID":"79103034-e1a4-44b1-bffc-e9edc76da393","Type":"ContainerStarted","Data":"a0ebfd010b18fafd67ae225a36055c020036ac2e73f433517e061c020436b561"} Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.743338 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" event={"ID":"79103034-e1a4-44b1-bffc-e9edc76da393","Type":"ContainerStarted","Data":"356d12be20341c27ba13054f30a962732de0049488d0ad48156c6110f84479c7"} Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.743377 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.746797 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-54mm5" event={"ID":"10291551-2baf-4271-bc49-6a40e5ceb94b","Type":"ContainerStarted","Data":"cf7fec9b8e68918e062be8ca425fd99a36eba9b696859bdcdf2a74796cb14711"} Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.755996 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" event={"ID":"f8aab518-1735-49f5-98af-68d01e096132","Type":"ContainerDied","Data":"e0b5e0a611f63aba48e8177e2c5a63451addbaa7f14a79c88162310ef7468dd6"} Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.756051 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0b5e0a611f63aba48e8177e2c5a63451addbaa7f14a79c88162310ef7468dd6" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.756017 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.761874 4644 generic.go:334] "Generic (PLEG): container finished" podID="4b355a1b-28e0-462c-a1ef-43eea6341565" containerID="f85adda5ac263603f5f8f57c36f1ae3a7d878908d1e8ab35b766e5b6df80d534" exitCode=0 Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.762653 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" podStartSLOduration=124.762175264 podStartE2EDuration="2m4.762175264s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:48:00.759404554 +0000 UTC m=+142.974355386" watchObservedRunningTime="2025-12-13 06:48:00.762175264 +0000 UTC m=+142.977126097" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.762899 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2bmvc" event={"ID":"4b355a1b-28e0-462c-a1ef-43eea6341565","Type":"ContainerDied","Data":"f85adda5ac263603f5f8f57c36f1ae3a7d878908d1e8ab35b766e5b6df80d534"} Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.762939 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2bmvc" event={"ID":"4b355a1b-28e0-462c-a1ef-43eea6341565","Type":"ContainerStarted","Data":"3f13444f58d076d4fa403284d876796fd46ed75cedf126c2008cc73f53a7a35f"} Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.764433 4644 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.793624 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-catalog-content\") pod \"certified-operators-szmwp\" (UID: \"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb\") " pod="openshift-marketplace/certified-operators-szmwp" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.793672 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lfwl\" (UniqueName: \"kubernetes.io/projected/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-kube-api-access-5lfwl\") pod \"certified-operators-szmwp\" (UID: \"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb\") " pod="openshift-marketplace/certified-operators-szmwp" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.793709 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-utilities\") pod \"certified-operators-szmwp\" (UID: \"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb\") " pod="openshift-marketplace/certified-operators-szmwp" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.795248 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-catalog-content\") pod \"certified-operators-szmwp\" (UID: \"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb\") " pod="openshift-marketplace/certified-operators-szmwp" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.796114 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-utilities\") pod \"certified-operators-szmwp\" (UID: \"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb\") " pod="openshift-marketplace/certified-operators-szmwp" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.816616 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lfwl\" (UniqueName: \"kubernetes.io/projected/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-kube-api-access-5lfwl\") pod \"certified-operators-szmwp\" (UID: \"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb\") " pod="openshift-marketplace/certified-operators-szmwp" Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.894225 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g2gkk"] Dec 13 06:48:00 crc kubenswrapper[4644]: W1213 06:48:00.906268 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda642cd12_49f1_4784_86ce_05f0291c9049.slice/crio-fc5338834e455a6613b7666d3c3a3f92171ed04711a287d83167524be8979b49 WatchSource:0}: Error finding container fc5338834e455a6613b7666d3c3a3f92171ed04711a287d83167524be8979b49: Status 404 returned error can't find the container with id fc5338834e455a6613b7666d3c3a3f92171ed04711a287d83167524be8979b49 Dec 13 06:48:00 crc kubenswrapper[4644]: I1213 06:48:00.942390 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-szmwp" Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.094881 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-szmwp"] Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.552040 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 06:48:01 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld Dec 13 06:48:01 crc kubenswrapper[4644]: [+]process-running ok Dec 13 06:48:01 crc kubenswrapper[4644]: healthz check failed Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.552104 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.769761 4644 generic.go:334] "Generic (PLEG): container finished" podID="b8b426e8-8b8b-4168-afaa-a5cbb6752fdb" containerID="8fab03e287540465f8b81676df1426c08c76024946b77970fff87acdf4366a3c" exitCode=0 Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.769873 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-szmwp" event={"ID":"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb","Type":"ContainerDied","Data":"8fab03e287540465f8b81676df1426c08c76024946b77970fff87acdf4366a3c"} Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.769921 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-szmwp" event={"ID":"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb","Type":"ContainerStarted","Data":"0832af65481d4130cf0a515ccd58449dcb0ae02db5ad4914a6987bfc1e4e38ce"} Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.771572 4644 generic.go:334] "Generic (PLEG): container finished" 
podID="10291551-2baf-4271-bc49-6a40e5ceb94b" containerID="1f91ca4c5f67f02fc7e4530efcbcbcd6aa336b3c38a4008204ff8a4118d8b746" exitCode=0 Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.771611 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-54mm5" event={"ID":"10291551-2baf-4271-bc49-6a40e5ceb94b","Type":"ContainerDied","Data":"1f91ca4c5f67f02fc7e4530efcbcbcd6aa336b3c38a4008204ff8a4118d8b746"} Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.772932 4644 generic.go:334] "Generic (PLEG): container finished" podID="a642cd12-49f1-4784-86ce-05f0291c9049" containerID="58b1d85a2528df4ee4f085b910fb2cf4802a703a33ddab1f9c58a125290710d7" exitCode=0 Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.772986 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2gkk" event={"ID":"a642cd12-49f1-4784-86ce-05f0291c9049","Type":"ContainerDied","Data":"58b1d85a2528df4ee4f085b910fb2cf4802a703a33ddab1f9c58a125290710d7"} Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.773014 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2gkk" event={"ID":"a642cd12-49f1-4784-86ce-05f0291c9049","Type":"ContainerStarted","Data":"fc5338834e455a6613b7666d3c3a3f92171ed04711a287d83167524be8979b49"} Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.962293 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.967338 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.967877 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.969918 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.970716 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.975330 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pngbm"] Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.979604 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pngbm" Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.981491 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 13 06:48:01 crc kubenswrapper[4644]: I1213 06:48:01.995876 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pngbm"] Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.113319 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c516743-8030-4b76-a40b-3439c1bf3c30-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1c516743-8030-4b76-a40b-3439c1bf3c30\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.113374 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b4d6920-0938-4fcf-a825-bfaec69da684-catalog-content\") pod \"redhat-marketplace-pngbm\" (UID: \"8b4d6920-0938-4fcf-a825-bfaec69da684\") " pod="openshift-marketplace/redhat-marketplace-pngbm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.113433 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c516743-8030-4b76-a40b-3439c1bf3c30-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1c516743-8030-4b76-a40b-3439c1bf3c30\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.113478 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b4d6920-0938-4fcf-a825-bfaec69da684-utilities\") pod \"redhat-marketplace-pngbm\" (UID: \"8b4d6920-0938-4fcf-a825-bfaec69da684\") " pod="openshift-marketplace/redhat-marketplace-pngbm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.113571 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8km7q\" (UniqueName: \"kubernetes.io/projected/8b4d6920-0938-4fcf-a825-bfaec69da684-kube-api-access-8km7q\") pod \"redhat-marketplace-pngbm\" (UID: \"8b4d6920-0938-4fcf-a825-bfaec69da684\") " pod="openshift-marketplace/redhat-marketplace-pngbm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.214564 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c516743-8030-4b76-a40b-3439c1bf3c30-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1c516743-8030-4b76-a40b-3439c1bf3c30\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.214614 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b4d6920-0938-4fcf-a825-bfaec69da684-catalog-content\") pod \"redhat-marketplace-pngbm\" (UID: \"8b4d6920-0938-4fcf-a825-bfaec69da684\") " pod="openshift-marketplace/redhat-marketplace-pngbm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.214673 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/1c516743-8030-4b76-a40b-3439c1bf3c30-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1c516743-8030-4b76-a40b-3439c1bf3c30\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.214693 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b4d6920-0938-4fcf-a825-bfaec69da684-utilities\") pod \"redhat-marketplace-pngbm\" (UID: \"8b4d6920-0938-4fcf-a825-bfaec69da684\") " pod="openshift-marketplace/redhat-marketplace-pngbm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.214716 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8km7q\" (UniqueName: \"kubernetes.io/projected/8b4d6920-0938-4fcf-a825-bfaec69da684-kube-api-access-8km7q\") pod \"redhat-marketplace-pngbm\" (UID: \"8b4d6920-0938-4fcf-a825-bfaec69da684\") " pod="openshift-marketplace/redhat-marketplace-pngbm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.215735 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b4d6920-0938-4fcf-a825-bfaec69da684-catalog-content\") pod \"redhat-marketplace-pngbm\" (UID: \"8b4d6920-0938-4fcf-a825-bfaec69da684\") " pod="openshift-marketplace/redhat-marketplace-pngbm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.215782 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c516743-8030-4b76-a40b-3439c1bf3c30-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1c516743-8030-4b76-a40b-3439c1bf3c30\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.216787 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b4d6920-0938-4fcf-a825-bfaec69da684-utilities\") pod \"redhat-marketplace-pngbm\" (UID: \"8b4d6920-0938-4fcf-a825-bfaec69da684\") " pod="openshift-marketplace/redhat-marketplace-pngbm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.234103 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c516743-8030-4b76-a40b-3439c1bf3c30-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1c516743-8030-4b76-a40b-3439c1bf3c30\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.247841 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8km7q\" (UniqueName: \"kubernetes.io/projected/8b4d6920-0938-4fcf-a825-bfaec69da684-kube-api-access-8km7q\") pod \"redhat-marketplace-pngbm\" (UID: \"8b4d6920-0938-4fcf-a825-bfaec69da684\") " pod="openshift-marketplace/redhat-marketplace-pngbm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.298211 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.305724 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pngbm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.374093 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-27zlm"] Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.375492 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-27zlm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.384186 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-27zlm"] Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.468953 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.509245 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pngbm"] Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.524185 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tg8t\" (UniqueName: \"kubernetes.io/projected/946372f5-3102-4e5c-b2af-03057f3b92dd-kube-api-access-7tg8t\") pod \"redhat-marketplace-27zlm\" (UID: \"946372f5-3102-4e5c-b2af-03057f3b92dd\") " pod="openshift-marketplace/redhat-marketplace-27zlm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.524371 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/946372f5-3102-4e5c-b2af-03057f3b92dd-catalog-content\") pod \"redhat-marketplace-27zlm\" (UID: \"946372f5-3102-4e5c-b2af-03057f3b92dd\") " pod="openshift-marketplace/redhat-marketplace-27zlm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.524489 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/946372f5-3102-4e5c-b2af-03057f3b92dd-utilities\") pod \"redhat-marketplace-27zlm\" (UID: \"946372f5-3102-4e5c-b2af-03057f3b92dd\") " pod="openshift-marketplace/redhat-marketplace-27zlm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.550950 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 06:48:02 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld Dec 13 06:48:02 crc kubenswrapper[4644]: [+]process-running ok Dec 13 06:48:02 crc kubenswrapper[4644]: healthz check failed Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.551015 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.625881 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tg8t\" (UniqueName: \"kubernetes.io/projected/946372f5-3102-4e5c-b2af-03057f3b92dd-kube-api-access-7tg8t\") pod \"redhat-marketplace-27zlm\" (UID: \"946372f5-3102-4e5c-b2af-03057f3b92dd\") " pod="openshift-marketplace/redhat-marketplace-27zlm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.626276 4644 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/946372f5-3102-4e5c-b2af-03057f3b92dd-catalog-content\") pod \"redhat-marketplace-27zlm\" (UID: \"946372f5-3102-4e5c-b2af-03057f3b92dd\") " pod="openshift-marketplace/redhat-marketplace-27zlm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.626339 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/946372f5-3102-4e5c-b2af-03057f3b92dd-utilities\") pod \"redhat-marketplace-27zlm\" (UID: \"946372f5-3102-4e5c-b2af-03057f3b92dd\") " pod="openshift-marketplace/redhat-marketplace-27zlm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.627015 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/946372f5-3102-4e5c-b2af-03057f3b92dd-catalog-content\") pod \"redhat-marketplace-27zlm\" (UID: \"946372f5-3102-4e5c-b2af-03057f3b92dd\") " pod="openshift-marketplace/redhat-marketplace-27zlm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.630995 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/946372f5-3102-4e5c-b2af-03057f3b92dd-utilities\") pod \"redhat-marketplace-27zlm\" (UID: \"946372f5-3102-4e5c-b2af-03057f3b92dd\") " pod="openshift-marketplace/redhat-marketplace-27zlm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.648207 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tg8t\" (UniqueName: \"kubernetes.io/projected/946372f5-3102-4e5c-b2af-03057f3b92dd-kube-api-access-7tg8t\") pod \"redhat-marketplace-27zlm\" (UID: \"946372f5-3102-4e5c-b2af-03057f3b92dd\") " pod="openshift-marketplace/redhat-marketplace-27zlm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.702940 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-27zlm" Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.786091 4644 generic.go:334] "Generic (PLEG): container finished" podID="8b4d6920-0938-4fcf-a825-bfaec69da684" containerID="904bfa839cfea4be09a34ba4b806354a28a3129fb40c96f23d3f17375bfb659c" exitCode=0 Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.786167 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pngbm" event={"ID":"8b4d6920-0938-4fcf-a825-bfaec69da684","Type":"ContainerDied","Data":"904bfa839cfea4be09a34ba4b806354a28a3129fb40c96f23d3f17375bfb659c"} Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.786199 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pngbm" event={"ID":"8b4d6920-0938-4fcf-a825-bfaec69da684","Type":"ContainerStarted","Data":"02b66bf716c03b13322f150885ef0304fbce941da96d17de86526d1c32be4993"} Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.792792 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1c516743-8030-4b76-a40b-3439c1bf3c30","Type":"ContainerStarted","Data":"91c55075eef9f95c3eac4b70b0d0c17363a8ab5f058af110392ed975ebf73156"} Dec 13 06:48:02 crc kubenswrapper[4644]: I1213 06:48:02.926979 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-27zlm"] Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.069856 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.075522 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-8ktwh" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.177773 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gqhfh"] Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.178822 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gqhfh" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.181686 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-dtxqg" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.181756 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.187467 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gqhfh"] Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.346141 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.346208 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnv5r\" (UniqueName: \"kubernetes.io/projected/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-kube-api-access-hnv5r\") pod \"redhat-operators-gqhfh\" (UID: \"e9452a17-eda0-4e66-bf4f-30e6e8ac8693\") " pod="openshift-marketplace/redhat-operators-gqhfh" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.346236 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.346266 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.346286 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-catalog-content\") pod \"redhat-operators-gqhfh\" (UID: \"e9452a17-eda0-4e66-bf4f-30e6e8ac8693\") " pod="openshift-marketplace/redhat-operators-gqhfh" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.346383 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.346401 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-utilities\") pod \"redhat-operators-gqhfh\" (UID: \"e9452a17-eda0-4e66-bf4f-30e6e8ac8693\") " 
pod="openshift-marketplace/redhat-operators-gqhfh" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.347271 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.353139 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.354026 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.356003 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.404839 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.412466 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.414279 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.433709 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.436495 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.437902 4644 patch_prober.go:28] interesting pod/console-f9d7485db-wfsz4 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.437946 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-wfsz4" podUID="ac23aa18-ed6d-4ea9-b720-aa7ccb164459" containerName="console" probeResult="failure" output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.446945 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnv5r\" (UniqueName: \"kubernetes.io/projected/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-kube-api-access-hnv5r\") pod \"redhat-operators-gqhfh\" (UID: \"e9452a17-eda0-4e66-bf4f-30e6e8ac8693\") " pod="openshift-marketplace/redhat-operators-gqhfh" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.447024 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-catalog-content\") pod \"redhat-operators-gqhfh\" (UID: \"e9452a17-eda0-4e66-bf4f-30e6e8ac8693\") " pod="openshift-marketplace/redhat-operators-gqhfh" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.447103 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-utilities\") pod \"redhat-operators-gqhfh\" (UID: \"e9452a17-eda0-4e66-bf4f-30e6e8ac8693\") " pod="openshift-marketplace/redhat-operators-gqhfh" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.447623 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-utilities\") pod \"redhat-operators-gqhfh\" (UID: \"e9452a17-eda0-4e66-bf4f-30e6e8ac8693\") " pod="openshift-marketplace/redhat-operators-gqhfh" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.448024 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-catalog-content\") pod \"redhat-operators-gqhfh\" (UID: \"e9452a17-eda0-4e66-bf4f-30e6e8ac8693\") " pod="openshift-marketplace/redhat-operators-gqhfh" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.472694 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnv5r\" (UniqueName: \"kubernetes.io/projected/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-kube-api-access-hnv5r\") pod \"redhat-operators-gqhfh\" (UID: \"e9452a17-eda0-4e66-bf4f-30e6e8ac8693\") " pod="openshift-marketplace/redhat-operators-gqhfh" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.548437 4644 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-vfb94" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.552743 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 06:48:03 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld Dec 13 06:48:03 crc kubenswrapper[4644]: [+]process-running ok Dec 13 06:48:03 crc kubenswrapper[4644]: healthz check failed Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.552817 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.580140 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-76whk"] Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.581129 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-76whk" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.597053 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-76whk"] Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.612371 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gqhfh" Dec 13 06:48:03 crc kubenswrapper[4644]: W1213 06:48:03.752545 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-142e1d097ab622efa4a692520de18c0a403abeee422bbea425c4be2d7d1cdf6b WatchSource:0}: Error finding container 142e1d097ab622efa4a692520de18c0a403abeee422bbea425c4be2d7d1cdf6b: Status 404 returned error can't find the container with id 142e1d097ab622efa4a692520de18c0a403abeee422bbea425c4be2d7d1cdf6b Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.759370 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c47q5\" (UniqueName: \"kubernetes.io/projected/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-kube-api-access-c47q5\") pod \"redhat-operators-76whk\" (UID: \"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e\") " pod="openshift-marketplace/redhat-operators-76whk" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.759485 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-catalog-content\") pod \"redhat-operators-76whk\" (UID: \"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e\") " pod="openshift-marketplace/redhat-operators-76whk" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.759522 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-utilities\") pod \"redhat-operators-76whk\" (UID: \"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e\") " pod="openshift-marketplace/redhat-operators-76whk" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.807231 4644 generic.go:334] "Generic (PLEG): container 
finished" podID="946372f5-3102-4e5c-b2af-03057f3b92dd" containerID="b751b472d8750e81413ff965ddbceb991003acaa076137d13ddd018b0ec99fe3" exitCode=0 Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.807353 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27zlm" event={"ID":"946372f5-3102-4e5c-b2af-03057f3b92dd","Type":"ContainerDied","Data":"b751b472d8750e81413ff965ddbceb991003acaa076137d13ddd018b0ec99fe3"} Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.807428 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27zlm" event={"ID":"946372f5-3102-4e5c-b2af-03057f3b92dd","Type":"ContainerStarted","Data":"ab875e1b89043f085d5eeef3cd12d49b154092c444c0c5f27520961a985d6dce"} Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.823319 4644 generic.go:334] "Generic (PLEG): container finished" podID="1c516743-8030-4b76-a40b-3439c1bf3c30" containerID="dd1549d3ee919cce6cd0cc5c7b22bfb93f8f835ffac3ef9b74c8e8fda96b3cb9" exitCode=0 Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.823623 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1c516743-8030-4b76-a40b-3439c1bf3c30","Type":"ContainerDied","Data":"dd1549d3ee919cce6cd0cc5c7b22bfb93f8f835ffac3ef9b74c8e8fda96b3cb9"} Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.841929 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"142e1d097ab622efa4a692520de18c0a403abeee422bbea425c4be2d7d1cdf6b"} Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.860417 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-utilities\") pod \"redhat-operators-76whk\" (UID: \"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e\") " pod="openshift-marketplace/redhat-operators-76whk" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.860808 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c47q5\" (UniqueName: \"kubernetes.io/projected/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-kube-api-access-c47q5\") pod \"redhat-operators-76whk\" (UID: \"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e\") " pod="openshift-marketplace/redhat-operators-76whk" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.860944 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-catalog-content\") pod \"redhat-operators-76whk\" (UID: \"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e\") " pod="openshift-marketplace/redhat-operators-76whk" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.861004 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-utilities\") pod \"redhat-operators-76whk\" (UID: \"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e\") " pod="openshift-marketplace/redhat-operators-76whk" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.861321 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-catalog-content\") pod \"redhat-operators-76whk\" (UID: 
\"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e\") " pod="openshift-marketplace/redhat-operators-76whk" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.891777 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c47q5\" (UniqueName: \"kubernetes.io/projected/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-kube-api-access-c47q5\") pod \"redhat-operators-76whk\" (UID: \"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e\") " pod="openshift-marketplace/redhat-operators-76whk" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.913531 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-76whk" Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.946339 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gqhfh"] Dec 13 06:48:03 crc kubenswrapper[4644]: W1213 06:48:03.956754 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9452a17_eda0_4e66_bf4f_30e6e8ac8693.slice/crio-cf32b06cd1f23c88232e910e8ed7ec37e1de7d39efdf995e7bc7315ff993d4fe WatchSource:0}: Error finding container cf32b06cd1f23c88232e910e8ed7ec37e1de7d39efdf995e7bc7315ff993d4fe: Status 404 returned error can't find the container with id cf32b06cd1f23c88232e910e8ed7ec37e1de7d39efdf995e7bc7315ff993d4fe Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.996957 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 13 06:48:03 crc kubenswrapper[4644]: I1213 06:48:03.997744 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.002939 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.014571 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.014600 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 13 06:48:04 crc kubenswrapper[4644]: W1213 06:48:04.046809 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-b44e01cfc7bec887e3ee3c4138e361a13099c54ff42420614bd3ea4e3c53e025 WatchSource:0}: Error finding container b44e01cfc7bec887e3ee3c4138e361a13099c54ff42420614bd3ea4e3c53e025: Status 404 returned error can't find the container with id b44e01cfc7bec887e3ee3c4138e361a13099c54ff42420614bd3ea4e3c53e025 Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.164741 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a4d56106-15c4-4d71-b64a-e4eb2ea68bbf-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a4d56106-15c4-4d71-b64a-e4eb2ea68bbf\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.164907 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a4d56106-15c4-4d71-b64a-e4eb2ea68bbf-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: 
\"a4d56106-15c4-4d71-b64a-e4eb2ea68bbf\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.266550 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a4d56106-15c4-4d71-b64a-e4eb2ea68bbf-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a4d56106-15c4-4d71-b64a-e4eb2ea68bbf\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.266609 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a4d56106-15c4-4d71-b64a-e4eb2ea68bbf-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a4d56106-15c4-4d71-b64a-e4eb2ea68bbf\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.266690 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a4d56106-15c4-4d71-b64a-e4eb2ea68bbf-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a4d56106-15c4-4d71-b64a-e4eb2ea68bbf\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.288838 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a4d56106-15c4-4d71-b64a-e4eb2ea68bbf-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a4d56106-15c4-4d71-b64a-e4eb2ea68bbf\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.350179 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.374629 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-76whk"] Dec 13 06:48:04 crc kubenswrapper[4644]: W1213 06:48:04.419985 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1e3eddf5_6aec_4c15_8ae7_a7258d60be4e.slice/crio-278812383a31cbe8c7aebda128041970bc1f9109f277592a1b1416fd27928d5e WatchSource:0}: Error finding container 278812383a31cbe8c7aebda128041970bc1f9109f277592a1b1416fd27928d5e: Status 404 returned error can't find the container with id 278812383a31cbe8c7aebda128041970bc1f9109f277592a1b1416fd27928d5e Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.551975 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 06:48:04 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld Dec 13 06:48:04 crc kubenswrapper[4644]: [+]process-running ok Dec 13 06:48:04 crc kubenswrapper[4644]: healthz check failed Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.552213 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.610070 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 13 06:48:04 
crc kubenswrapper[4644]: I1213 06:48:04.912756 4644 generic.go:334] "Generic (PLEG): container finished" podID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" containerID="1a485eb058cb24ac1ad2c331be8dda06997200219f9f5fc2a597f37cd34d9532" exitCode=0 Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.912845 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqhfh" event={"ID":"e9452a17-eda0-4e66-bf4f-30e6e8ac8693","Type":"ContainerDied","Data":"1a485eb058cb24ac1ad2c331be8dda06997200219f9f5fc2a597f37cd34d9532"} Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.913858 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqhfh" event={"ID":"e9452a17-eda0-4e66-bf4f-30e6e8ac8693","Type":"ContainerStarted","Data":"cf32b06cd1f23c88232e910e8ed7ec37e1de7d39efdf995e7bc7315ff993d4fe"} Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.946167 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"a6eaaaac98c6c17d23f884d6b6d17dea309546401ad728a2b7d9d30a71079ce7"} Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.947180 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.961071 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a4d56106-15c4-4d71-b64a-e4eb2ea68bbf","Type":"ContainerStarted","Data":"0979e2e8076e6f35b456d3bb52781e73d3c5890279d7847c43e5155a7a9e2959"} Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.976014 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"b1481699500fa4ad9840fdf6aca41fa0fe890c576c51973933bfba9c62b934a3"} Dec 13 06:48:04 crc kubenswrapper[4644]: I1213 06:48:04.976054 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"134fee987f4d04c50391d7870e3477602608590573f00b632af20ed88c6cff8e"} Dec 13 06:48:05 crc kubenswrapper[4644]: I1213 06:48:05.008752 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"c928c2d15a349e5afdc83d0f5a2f2df2600f2f0906fb6f2205ccbf3a290c189c"} Dec 13 06:48:05 crc kubenswrapper[4644]: I1213 06:48:05.008804 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"b44e01cfc7bec887e3ee3c4138e361a13099c54ff42420614bd3ea4e3c53e025"} Dec 13 06:48:05 crc kubenswrapper[4644]: I1213 06:48:05.012759 4644 generic.go:334] "Generic (PLEG): container finished" podID="1e3eddf5-6aec-4c15-8ae7-a7258d60be4e" containerID="d9aef0c6879c4d0039d2e53e6758263f48b6809bc90c2cb3c7d339fa11c98f6d" exitCode=0 Dec 13 06:48:05 crc kubenswrapper[4644]: I1213 06:48:05.013359 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-76whk" 
event={"ID":"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e","Type":"ContainerDied","Data":"d9aef0c6879c4d0039d2e53e6758263f48b6809bc90c2cb3c7d339fa11c98f6d"} Dec 13 06:48:05 crc kubenswrapper[4644]: I1213 06:48:05.013380 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-76whk" event={"ID":"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e","Type":"ContainerStarted","Data":"278812383a31cbe8c7aebda128041970bc1f9109f277592a1b1416fd27928d5e"} Dec 13 06:48:05 crc kubenswrapper[4644]: I1213 06:48:05.349203 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 06:48:05 crc kubenswrapper[4644]: I1213 06:48:05.487983 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c516743-8030-4b76-a40b-3439c1bf3c30-kubelet-dir\") pod \"1c516743-8030-4b76-a40b-3439c1bf3c30\" (UID: \"1c516743-8030-4b76-a40b-3439c1bf3c30\") " Dec 13 06:48:05 crc kubenswrapper[4644]: I1213 06:48:05.488106 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c516743-8030-4b76-a40b-3439c1bf3c30-kube-api-access\") pod \"1c516743-8030-4b76-a40b-3439c1bf3c30\" (UID: \"1c516743-8030-4b76-a40b-3439c1bf3c30\") " Dec 13 06:48:05 crc kubenswrapper[4644]: I1213 06:48:05.488130 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c516743-8030-4b76-a40b-3439c1bf3c30-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1c516743-8030-4b76-a40b-3439c1bf3c30" (UID: "1c516743-8030-4b76-a40b-3439c1bf3c30"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:48:05 crc kubenswrapper[4644]: I1213 06:48:05.488363 4644 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c516743-8030-4b76-a40b-3439c1bf3c30-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:05 crc kubenswrapper[4644]: I1213 06:48:05.508500 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c516743-8030-4b76-a40b-3439c1bf3c30-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1c516743-8030-4b76-a40b-3439c1bf3c30" (UID: "1c516743-8030-4b76-a40b-3439c1bf3c30"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:48:05 crc kubenswrapper[4644]: I1213 06:48:05.552064 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 06:48:05 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld Dec 13 06:48:05 crc kubenswrapper[4644]: [+]process-running ok Dec 13 06:48:05 crc kubenswrapper[4644]: healthz check failed Dec 13 06:48:05 crc kubenswrapper[4644]: I1213 06:48:05.552146 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 06:48:05 crc kubenswrapper[4644]: I1213 06:48:05.589592 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c516743-8030-4b76-a40b-3439c1bf3c30-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:06 crc kubenswrapper[4644]: I1213 06:48:06.037136 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-hn5l6" Dec 13 06:48:06 crc kubenswrapper[4644]: I1213 06:48:06.047946 4644 generic.go:334] "Generic (PLEG): container finished" podID="a4d56106-15c4-4d71-b64a-e4eb2ea68bbf" containerID="d47d5912e8c1216398008155c5c1b9b60571c9fb8db30e6fb4e73daf028a97f2" exitCode=0 Dec 13 06:48:06 crc kubenswrapper[4644]: I1213 06:48:06.048069 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a4d56106-15c4-4d71-b64a-e4eb2ea68bbf","Type":"ContainerDied","Data":"d47d5912e8c1216398008155c5c1b9b60571c9fb8db30e6fb4e73daf028a97f2"} Dec 13 06:48:06 crc kubenswrapper[4644]: I1213 06:48:06.057032 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 06:48:06 crc kubenswrapper[4644]: I1213 06:48:06.057709 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1c516743-8030-4b76-a40b-3439c1bf3c30","Type":"ContainerDied","Data":"91c55075eef9f95c3eac4b70b0d0c17363a8ab5f058af110392ed975ebf73156"} Dec 13 06:48:06 crc kubenswrapper[4644]: I1213 06:48:06.057766 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="91c55075eef9f95c3eac4b70b0d0c17363a8ab5f058af110392ed975ebf73156" Dec 13 06:48:06 crc kubenswrapper[4644]: I1213 06:48:06.551950 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 06:48:06 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld Dec 13 06:48:06 crc kubenswrapper[4644]: [+]process-running ok Dec 13 06:48:06 crc kubenswrapper[4644]: healthz check failed Dec 13 06:48:06 crc kubenswrapper[4644]: I1213 06:48:06.552485 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 06:48:07 crc kubenswrapper[4644]: I1213 06:48:07.551714 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 06:48:07 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld Dec 13 06:48:07 crc kubenswrapper[4644]: [+]process-running ok Dec 13 06:48:07 crc kubenswrapper[4644]: healthz check failed Dec 13 06:48:07 crc kubenswrapper[4644]: I1213 06:48:07.551953 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 06:48:08 crc kubenswrapper[4644]: I1213 06:48:08.552886 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 06:48:08 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld Dec 13 06:48:08 crc kubenswrapper[4644]: [+]process-running ok Dec 13 06:48:08 crc kubenswrapper[4644]: healthz check failed Dec 13 06:48:08 crc kubenswrapper[4644]: I1213 06:48:08.552946 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 06:48:09 crc kubenswrapper[4644]: I1213 06:48:09.552485 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 06:48:09 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld Dec 13 06:48:09 crc 
kubenswrapper[4644]: [+]process-running ok Dec 13 06:48:09 crc kubenswrapper[4644]: healthz check failed Dec 13 06:48:09 crc kubenswrapper[4644]: I1213 06:48:09.552873 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 06:48:09 crc kubenswrapper[4644]: I1213 06:48:09.753872 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 06:48:09 crc kubenswrapper[4644]: I1213 06:48:09.753970 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 06:48:09 crc kubenswrapper[4644]: I1213 06:48:09.818067 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 06:48:09 crc kubenswrapper[4644]: I1213 06:48:09.954875 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a4d56106-15c4-4d71-b64a-e4eb2ea68bbf-kubelet-dir\") pod \"a4d56106-15c4-4d71-b64a-e4eb2ea68bbf\" (UID: \"a4d56106-15c4-4d71-b64a-e4eb2ea68bbf\") " Dec 13 06:48:09 crc kubenswrapper[4644]: I1213 06:48:09.955017 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a4d56106-15c4-4d71-b64a-e4eb2ea68bbf-kube-api-access\") pod \"a4d56106-15c4-4d71-b64a-e4eb2ea68bbf\" (UID: \"a4d56106-15c4-4d71-b64a-e4eb2ea68bbf\") " Dec 13 06:48:09 crc kubenswrapper[4644]: I1213 06:48:09.955265 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a4d56106-15c4-4d71-b64a-e4eb2ea68bbf-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a4d56106-15c4-4d71-b64a-e4eb2ea68bbf" (UID: "a4d56106-15c4-4d71-b64a-e4eb2ea68bbf"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:48:09 crc kubenswrapper[4644]: I1213 06:48:09.955573 4644 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a4d56106-15c4-4d71-b64a-e4eb2ea68bbf-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:09 crc kubenswrapper[4644]: I1213 06:48:09.968685 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4d56106-15c4-4d71-b64a-e4eb2ea68bbf-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a4d56106-15c4-4d71-b64a-e4eb2ea68bbf" (UID: "a4d56106-15c4-4d71-b64a-e4eb2ea68bbf"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:48:10 crc kubenswrapper[4644]: I1213 06:48:10.057091 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a4d56106-15c4-4d71-b64a-e4eb2ea68bbf-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:10 crc kubenswrapper[4644]: I1213 06:48:10.088639 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a4d56106-15c4-4d71-b64a-e4eb2ea68bbf","Type":"ContainerDied","Data":"0979e2e8076e6f35b456d3bb52781e73d3c5890279d7847c43e5155a7a9e2959"} Dec 13 06:48:10 crc kubenswrapper[4644]: I1213 06:48:10.088680 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0979e2e8076e6f35b456d3bb52781e73d3c5890279d7847c43e5155a7a9e2959" Dec 13 06:48:10 crc kubenswrapper[4644]: I1213 06:48:10.088803 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 06:48:10 crc kubenswrapper[4644]: I1213 06:48:10.550777 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 06:48:10 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld Dec 13 06:48:10 crc kubenswrapper[4644]: [+]process-running ok Dec 13 06:48:10 crc kubenswrapper[4644]: healthz check failed Dec 13 06:48:10 crc kubenswrapper[4644]: I1213 06:48:10.550840 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 06:48:11 crc kubenswrapper[4644]: I1213 06:48:11.550874 4644 patch_prober.go:28] interesting pod/router-default-5444994796-vfb94 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 06:48:11 crc kubenswrapper[4644]: [-]has-synced failed: reason withheld Dec 13 06:48:11 crc kubenswrapper[4644]: [+]process-running ok Dec 13 06:48:11 crc kubenswrapper[4644]: healthz check failed Dec 13 06:48:11 crc kubenswrapper[4644]: I1213 06:48:11.550959 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vfb94" podUID="3f1dbe36-7205-4fac-9ce9-0194d29a6bba" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 06:48:12 crc kubenswrapper[4644]: I1213 06:48:12.551589 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-vfb94" Dec 13 06:48:12 crc kubenswrapper[4644]: I1213 06:48:12.554895 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-vfb94" Dec 13 06:48:13 crc kubenswrapper[4644]: I1213 06:48:13.429043 4644 patch_prober.go:28] interesting pod/console-f9d7485db-wfsz4 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Dec 13 06:48:13 crc kubenswrapper[4644]: I1213 06:48:13.429121 4644 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-console/console-f9d7485db-wfsz4" podUID="ac23aa18-ed6d-4ea9-b720-aa7ccb164459" containerName="console" probeResult="failure" output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" Dec 13 06:48:16 crc kubenswrapper[4644]: I1213 06:48:16.656169 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mh84f"] Dec 13 06:48:16 crc kubenswrapper[4644]: I1213 06:48:16.656870 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" podUID="81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64" containerName="controller-manager" containerID="cri-o://ffd1633bf05c6df1c64bb80b5d05803719fcf2c6c7dab02af4097649fadf7608" gracePeriod=30 Dec 13 06:48:16 crc kubenswrapper[4644]: I1213 06:48:16.659802 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w"] Dec 13 06:48:16 crc kubenswrapper[4644]: I1213 06:48:16.660049 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" podUID="33355e8a-5a62-49d4-8c71-ab546cdbf141" containerName="route-controller-manager" containerID="cri-o://05e30bf26d1a4e1323853cdfb4869fe4888321c239cf20ef9299c4bb45b67ba1" gracePeriod=30 Dec 13 06:48:17 crc kubenswrapper[4644]: I1213 06:48:17.135498 4644 generic.go:334] "Generic (PLEG): container finished" podID="33355e8a-5a62-49d4-8c71-ab546cdbf141" containerID="05e30bf26d1a4e1323853cdfb4869fe4888321c239cf20ef9299c4bb45b67ba1" exitCode=0 Dec 13 06:48:17 crc kubenswrapper[4644]: I1213 06:48:17.135561 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" event={"ID":"33355e8a-5a62-49d4-8c71-ab546cdbf141","Type":"ContainerDied","Data":"05e30bf26d1a4e1323853cdfb4869fe4888321c239cf20ef9299c4bb45b67ba1"} Dec 13 06:48:17 crc kubenswrapper[4644]: I1213 06:48:17.138149 4644 generic.go:334] "Generic (PLEG): container finished" podID="81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64" containerID="ffd1633bf05c6df1c64bb80b5d05803719fcf2c6c7dab02af4097649fadf7608" exitCode=0 Dec 13 06:48:17 crc kubenswrapper[4644]: I1213 06:48:17.138176 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" event={"ID":"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64","Type":"ContainerDied","Data":"ffd1633bf05c6df1c64bb80b5d05803719fcf2c6c7dab02af4097649fadf7608"} Dec 13 06:48:18 crc kubenswrapper[4644]: I1213 06:48:18.486595 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs\") pod \"network-metrics-daemon-c88wl\" (UID: \"ae945e75-99b5-40b6-851d-dc9348056cdb\") " pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:48:18 crc kubenswrapper[4644]: I1213 06:48:18.491768 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ae945e75-99b5-40b6-851d-dc9348056cdb-metrics-certs\") pod \"network-metrics-daemon-c88wl\" (UID: \"ae945e75-99b5-40b6-851d-dc9348056cdb\") " pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:48:18 crc kubenswrapper[4644]: I1213 06:48:18.600231 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-c88wl" Dec 13 06:48:19 crc kubenswrapper[4644]: I1213 06:48:19.945335 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.771164 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.804469 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-78bc4cb977-c5l6c"] Dec 13 06:48:22 crc kubenswrapper[4644]: E1213 06:48:22.809480 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64" containerName="controller-manager" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.809505 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64" containerName="controller-manager" Dec 13 06:48:22 crc kubenswrapper[4644]: E1213 06:48:22.809524 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4d56106-15c4-4d71-b64a-e4eb2ea68bbf" containerName="pruner" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.809547 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4d56106-15c4-4d71-b64a-e4eb2ea68bbf" containerName="pruner" Dec 13 06:48:22 crc kubenswrapper[4644]: E1213 06:48:22.809555 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c516743-8030-4b76-a40b-3439c1bf3c30" containerName="pruner" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.809561 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c516743-8030-4b76-a40b-3439c1bf3c30" containerName="pruner" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.809681 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64" containerName="controller-manager" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.809708 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4d56106-15c4-4d71-b64a-e4eb2ea68bbf" containerName="pruner" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.809720 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c516743-8030-4b76-a40b-3439c1bf3c30" containerName="pruner" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.810401 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-78bc4cb977-c5l6c"] Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.810516 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.914210 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.946211 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-client-ca\") pod \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.946311 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-serving-cert\") pod \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.946367 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqmnn\" (UniqueName: \"kubernetes.io/projected/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-kube-api-access-tqmnn\") pod \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.946478 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-config\") pod \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.946523 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-proxy-ca-bundles\") pod \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\" (UID: \"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64\") " Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.947087 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-client-ca" (OuterVolumeSpecName: "client-ca") pod "81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64" (UID: "81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.947167 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/079619c8-cbc6-485e-a90d-72b905d379b0-serving-cert\") pod \"controller-manager-78bc4cb977-c5l6c\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.947298 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-client-ca\") pod \"controller-manager-78bc4cb977-c5l6c\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.947364 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-config\") pod \"controller-manager-78bc4cb977-c5l6c\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.947415 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7h7b\" (UniqueName: \"kubernetes.io/projected/079619c8-cbc6-485e-a90d-72b905d379b0-kube-api-access-t7h7b\") pod \"controller-manager-78bc4cb977-c5l6c\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.947471 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-proxy-ca-bundles\") pod \"controller-manager-78bc4cb977-c5l6c\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.947687 4644 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.947702 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64" (UID: "81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.949048 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-config" (OuterVolumeSpecName: "config") pod "81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64" (UID: "81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.954502 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64" (UID: "81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:48:22 crc kubenswrapper[4644]: I1213 06:48:22.959760 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-kube-api-access-tqmnn" (OuterVolumeSpecName: "kube-api-access-tqmnn") pod "81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64" (UID: "81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64"). InnerVolumeSpecName "kube-api-access-tqmnn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.048061 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33355e8a-5a62-49d4-8c71-ab546cdbf141-config\") pod \"33355e8a-5a62-49d4-8c71-ab546cdbf141\" (UID: \"33355e8a-5a62-49d4-8c71-ab546cdbf141\") " Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.048106 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xd8ml\" (UniqueName: \"kubernetes.io/projected/33355e8a-5a62-49d4-8c71-ab546cdbf141-kube-api-access-xd8ml\") pod \"33355e8a-5a62-49d4-8c71-ab546cdbf141\" (UID: \"33355e8a-5a62-49d4-8c71-ab546cdbf141\") " Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.048155 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33355e8a-5a62-49d4-8c71-ab546cdbf141-serving-cert\") pod \"33355e8a-5a62-49d4-8c71-ab546cdbf141\" (UID: \"33355e8a-5a62-49d4-8c71-ab546cdbf141\") " Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.048270 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/33355e8a-5a62-49d4-8c71-ab546cdbf141-client-ca\") pod \"33355e8a-5a62-49d4-8c71-ab546cdbf141\" (UID: \"33355e8a-5a62-49d4-8c71-ab546cdbf141\") " Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.048716 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7h7b\" (UniqueName: \"kubernetes.io/projected/079619c8-cbc6-485e-a90d-72b905d379b0-kube-api-access-t7h7b\") pod \"controller-manager-78bc4cb977-c5l6c\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.048791 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-proxy-ca-bundles\") pod \"controller-manager-78bc4cb977-c5l6c\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.048820 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/079619c8-cbc6-485e-a90d-72b905d379b0-serving-cert\") pod \"controller-manager-78bc4cb977-c5l6c\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " 
pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.049745 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-client-ca\") pod \"controller-manager-78bc4cb977-c5l6c\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.049810 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-config\") pod \"controller-manager-78bc4cb977-c5l6c\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.049861 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.049879 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqmnn\" (UniqueName: \"kubernetes.io/projected/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-kube-api-access-tqmnn\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.049892 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.049902 4644 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.051238 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-config\") pod \"controller-manager-78bc4cb977-c5l6c\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.051298 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-proxy-ca-bundles\") pod \"controller-manager-78bc4cb977-c5l6c\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.051841 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33355e8a-5a62-49d4-8c71-ab546cdbf141-client-ca" (OuterVolumeSpecName: "client-ca") pod "33355e8a-5a62-49d4-8c71-ab546cdbf141" (UID: "33355e8a-5a62-49d4-8c71-ab546cdbf141"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.052815 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-client-ca\") pod \"controller-manager-78bc4cb977-c5l6c\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.052985 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33355e8a-5a62-49d4-8c71-ab546cdbf141-config" (OuterVolumeSpecName: "config") pod "33355e8a-5a62-49d4-8c71-ab546cdbf141" (UID: "33355e8a-5a62-49d4-8c71-ab546cdbf141"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.053214 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33355e8a-5a62-49d4-8c71-ab546cdbf141-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "33355e8a-5a62-49d4-8c71-ab546cdbf141" (UID: "33355e8a-5a62-49d4-8c71-ab546cdbf141"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.055521 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-c88wl"] Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.055729 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/079619c8-cbc6-485e-a90d-72b905d379b0-serving-cert\") pod \"controller-manager-78bc4cb977-c5l6c\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.056117 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33355e8a-5a62-49d4-8c71-ab546cdbf141-kube-api-access-xd8ml" (OuterVolumeSpecName: "kube-api-access-xd8ml") pod "33355e8a-5a62-49d4-8c71-ab546cdbf141" (UID: "33355e8a-5a62-49d4-8c71-ab546cdbf141"). InnerVolumeSpecName "kube-api-access-xd8ml". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.069266 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7h7b\" (UniqueName: \"kubernetes.io/projected/079619c8-cbc6-485e-a90d-72b905d379b0-kube-api-access-t7h7b\") pod \"controller-manager-78bc4cb977-c5l6c\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.151238 4644 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/33355e8a-5a62-49d4-8c71-ab546cdbf141-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.151271 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33355e8a-5a62-49d4-8c71-ab546cdbf141-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.151280 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xd8ml\" (UniqueName: \"kubernetes.io/projected/33355e8a-5a62-49d4-8c71-ab546cdbf141-kube-api-access-xd8ml\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.151293 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33355e8a-5a62-49d4-8c71-ab546cdbf141-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.176354 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-76whk" event={"ID":"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e","Type":"ContainerStarted","Data":"875705816c6647192063f6df04df9c18137e28d9672b1e4aac6a921abba82cdf"} Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.178735 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.179664 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mh84f" event={"ID":"81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64","Type":"ContainerDied","Data":"6302f161f1f349b08e2b4f03de62c48c9ff6a4180f9325c48b9a1b35bcd5a488"} Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.179715 4644 scope.go:117] "RemoveContainer" containerID="ffd1633bf05c6df1c64bb80b5d05803719fcf2c6c7dab02af4097649fadf7608" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.187757 4644 generic.go:334] "Generic (PLEG): container finished" podID="10291551-2baf-4271-bc49-6a40e5ceb94b" containerID="bd5c77c633c156461e97f0dcacc96167db72c22772078c7f793efd58c44b80c0" exitCode=0 Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.188021 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-54mm5" event={"ID":"10291551-2baf-4271-bc49-6a40e5ceb94b","Type":"ContainerDied","Data":"bd5c77c633c156461e97f0dcacc96167db72c22772078c7f793efd58c44b80c0"} Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.198551 4644 generic.go:334] "Generic (PLEG): container finished" podID="946372f5-3102-4e5c-b2af-03057f3b92dd" containerID="99573b75f1c5839caa15e2425ecc5fd50aeede653a85f18920bfd1b8813a891d" exitCode=0 Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.198622 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27zlm" event={"ID":"946372f5-3102-4e5c-b2af-03057f3b92dd","Type":"ContainerDied","Data":"99573b75f1c5839caa15e2425ecc5fd50aeede653a85f18920bfd1b8813a891d"} Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.202502 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" event={"ID":"33355e8a-5a62-49d4-8c71-ab546cdbf141","Type":"ContainerDied","Data":"cb2a4eb8f17e723ea45fab3bcf7a12ee30fb789abc62be00d64255deb87fe573"} Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.202524 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.207773 4644 generic.go:334] "Generic (PLEG): container finished" podID="8b4d6920-0938-4fcf-a825-bfaec69da684" containerID="f83496b87e599da0720cd2d8324f929605305836a85b1bb5d08acce122934647" exitCode=0 Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.207853 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pngbm" event={"ID":"8b4d6920-0938-4fcf-a825-bfaec69da684","Type":"ContainerDied","Data":"f83496b87e599da0720cd2d8324f929605305836a85b1bb5d08acce122934647"} Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.208808 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.219113 4644 generic.go:334] "Generic (PLEG): container finished" podID="4b355a1b-28e0-462c-a1ef-43eea6341565" containerID="d3ac3afb692ecd1394722bb0326bc5075372023eb4cc09244bd228e403fbe225" exitCode=0 Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.219223 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2bmvc" event={"ID":"4b355a1b-28e0-462c-a1ef-43eea6341565","Type":"ContainerDied","Data":"d3ac3afb692ecd1394722bb0326bc5075372023eb4cc09244bd228e403fbe225"} Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.231748 4644 generic.go:334] "Generic (PLEG): container finished" podID="a642cd12-49f1-4784-86ce-05f0291c9049" containerID="4deeb98aea42e1c5836ce3ef9bac861cb27ba24c851389e81b512913738e274c" exitCode=0 Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.231805 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2gkk" event={"ID":"a642cd12-49f1-4784-86ce-05f0291c9049","Type":"ContainerDied","Data":"4deeb98aea42e1c5836ce3ef9bac861cb27ba24c851389e81b512913738e274c"} Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.236836 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqhfh" event={"ID":"e9452a17-eda0-4e66-bf4f-30e6e8ac8693","Type":"ContainerStarted","Data":"18544f7158f117b0d332a235e59e34a9a6a082154d4b31619a3fb49fbd38d7b6"} Dec 13 06:48:23 crc kubenswrapper[4644]: W1213 06:48:23.239455 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae945e75_99b5_40b6_851d_dc9348056cdb.slice/crio-8a5c398474a8e475e7cfb6f03550b1c317ea5e6907ac9c7ef95ac235204078f8 WatchSource:0}: Error finding container 8a5c398474a8e475e7cfb6f03550b1c317ea5e6907ac9c7ef95ac235204078f8: Status 404 returned error can't find the container with id 8a5c398474a8e475e7cfb6f03550b1c317ea5e6907ac9c7ef95ac235204078f8 Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.242672 4644 generic.go:334] "Generic (PLEG): container finished" podID="b8b426e8-8b8b-4168-afaa-a5cbb6752fdb" containerID="73118887678da774cbb6c2837545329f6563f7ccdf752d3d174007b70e957bde" exitCode=0 Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.242719 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-szmwp" event={"ID":"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb","Type":"ContainerDied","Data":"73118887678da774cbb6c2837545329f6563f7ccdf752d3d174007b70e957bde"} Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.334490 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mh84f"] Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.336547 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mh84f"] Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.343808 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w"] Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.347260 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-bmz5w"] Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.359110 4644 scope.go:117] 
"RemoveContainer" containerID="05e30bf26d1a4e1323853cdfb4869fe4888321c239cf20ef9299c4bb45b67ba1" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.440671 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.444698 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:48:23 crc kubenswrapper[4644]: I1213 06:48:23.444831 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-78bc4cb977-c5l6c"] Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.251481 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-szmwp" event={"ID":"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb","Type":"ContainerStarted","Data":"bc4cf461cf4903c7cbc065ecb98e341b12988233689de7f277aa554bbc352d6d"} Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.254313 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27zlm" event={"ID":"946372f5-3102-4e5c-b2af-03057f3b92dd","Type":"ContainerStarted","Data":"b79fcb7a30c6e24a2b2e5b948ef673d7d0705c03982d40f672399445a551d3b2"} Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.256827 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-54mm5" event={"ID":"10291551-2baf-4271-bc49-6a40e5ceb94b","Type":"ContainerStarted","Data":"1fe23cd40df07897833dbd638f5359afa9604c782334dfcb9c06b9600ef516d9"} Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.258590 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2bmvc" event={"ID":"4b355a1b-28e0-462c-a1ef-43eea6341565","Type":"ContainerStarted","Data":"53d9e38c4aa5494a6091b4cf1765c744f439df0001d5243d593272ec35ef5f3d"} Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.260270 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2gkk" event={"ID":"a642cd12-49f1-4784-86ce-05f0291c9049","Type":"ContainerStarted","Data":"cc051242d1d848310a2e6a6f724974f1f8e9696e031e5b2ddd8d66b43f6a394b"} Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.261772 4644 generic.go:334] "Generic (PLEG): container finished" podID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" containerID="18544f7158f117b0d332a235e59e34a9a6a082154d4b31619a3fb49fbd38d7b6" exitCode=0 Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.261841 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqhfh" event={"ID":"e9452a17-eda0-4e66-bf4f-30e6e8ac8693","Type":"ContainerDied","Data":"18544f7158f117b0d332a235e59e34a9a6a082154d4b31619a3fb49fbd38d7b6"} Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.266270 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pngbm" event={"ID":"8b4d6920-0938-4fcf-a825-bfaec69da684","Type":"ContainerStarted","Data":"5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420"} Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.267686 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" event={"ID":"079619c8-cbc6-485e-a90d-72b905d379b0","Type":"ContainerStarted","Data":"d75d3f21936747976edca09dcc6db919a9a1b1f39f3f02ed114243e2e0255416"} Dec 13 06:48:24 crc kubenswrapper[4644]: 
I1213 06:48:24.267728 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" event={"ID":"079619c8-cbc6-485e-a90d-72b905d379b0","Type":"ContainerStarted","Data":"6fe061a49ce09552d141e0684405e6eca0fcef4eb05927f69dd6aaba9828c53f"} Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.267843 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.269283 4644 generic.go:334] "Generic (PLEG): container finished" podID="1e3eddf5-6aec-4c15-8ae7-a7258d60be4e" containerID="875705816c6647192063f6df04df9c18137e28d9672b1e4aac6a921abba82cdf" exitCode=0 Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.269346 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-76whk" event={"ID":"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e","Type":"ContainerDied","Data":"875705816c6647192063f6df04df9c18137e28d9672b1e4aac6a921abba82cdf"} Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.272867 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-c88wl" event={"ID":"ae945e75-99b5-40b6-851d-dc9348056cdb","Type":"ContainerStarted","Data":"12ce7480695267dac4a88083f3aaa9dbdcabb91344240e180147ab75ce3b0614"} Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.272907 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-c88wl" event={"ID":"ae945e75-99b5-40b6-851d-dc9348056cdb","Type":"ContainerStarted","Data":"93b50213f1096f33212852b1fc25f5efc8c0bef129fe385bbb483e780d32d931"} Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.272924 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-c88wl" event={"ID":"ae945e75-99b5-40b6-851d-dc9348056cdb","Type":"ContainerStarted","Data":"8a5c398474a8e475e7cfb6f03550b1c317ea5e6907ac9c7ef95ac235204078f8"} Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.275604 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.279558 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-szmwp" podStartSLOduration=2.225730799 podStartE2EDuration="24.279546046s" podCreationTimestamp="2025-12-13 06:48:00 +0000 UTC" firstStartedPulling="2025-12-13 06:48:01.771181533 +0000 UTC m=+143.986132365" lastFinishedPulling="2025-12-13 06:48:23.824996779 +0000 UTC m=+166.039947612" observedRunningTime="2025-12-13 06:48:24.276799231 +0000 UTC m=+166.491750064" watchObservedRunningTime="2025-12-13 06:48:24.279546046 +0000 UTC m=+166.494496879" Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.294012 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-54mm5" podStartSLOduration=2.326547923 podStartE2EDuration="24.293993427s" podCreationTimestamp="2025-12-13 06:48:00 +0000 UTC" firstStartedPulling="2025-12-13 06:48:01.772939679 +0000 UTC m=+143.987890512" lastFinishedPulling="2025-12-13 06:48:23.740385183 +0000 UTC m=+165.955336016" observedRunningTime="2025-12-13 06:48:24.293947351 +0000 UTC m=+166.508898184" watchObservedRunningTime="2025-12-13 06:48:24.293993427 +0000 UTC m=+166.508944260" Dec 13 06:48:24 crc 
kubenswrapper[4644]: I1213 06:48:24.310081 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pngbm" podStartSLOduration=2.341755144 podStartE2EDuration="23.310065594s" podCreationTimestamp="2025-12-13 06:48:01 +0000 UTC" firstStartedPulling="2025-12-13 06:48:02.78826427 +0000 UTC m=+145.003215103" lastFinishedPulling="2025-12-13 06:48:23.75657472 +0000 UTC m=+165.971525553" observedRunningTime="2025-12-13 06:48:24.307211546 +0000 UTC m=+166.522162379" watchObservedRunningTime="2025-12-13 06:48:24.310065594 +0000 UTC m=+166.525016427" Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.324566 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-g2gkk" podStartSLOduration=2.202416784 podStartE2EDuration="24.324538112s" podCreationTimestamp="2025-12-13 06:48:00 +0000 UTC" firstStartedPulling="2025-12-13 06:48:01.774427778 +0000 UTC m=+143.989378611" lastFinishedPulling="2025-12-13 06:48:23.896549115 +0000 UTC m=+166.111499939" observedRunningTime="2025-12-13 06:48:24.324068519 +0000 UTC m=+166.539019351" watchObservedRunningTime="2025-12-13 06:48:24.324538112 +0000 UTC m=+166.539488945" Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.363288 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-27zlm" podStartSLOduration=2.389064477 podStartE2EDuration="22.36326801s" podCreationTimestamp="2025-12-13 06:48:02 +0000 UTC" firstStartedPulling="2025-12-13 06:48:03.810598023 +0000 UTC m=+146.025548856" lastFinishedPulling="2025-12-13 06:48:23.784801556 +0000 UTC m=+165.999752389" observedRunningTime="2025-12-13 06:48:24.361462365 +0000 UTC m=+166.576413197" watchObservedRunningTime="2025-12-13 06:48:24.36326801 +0000 UTC m=+166.578218843" Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.381062 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-c88wl" podStartSLOduration=148.38104398 podStartE2EDuration="2m28.38104398s" podCreationTimestamp="2025-12-13 06:45:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:48:24.377460653 +0000 UTC m=+166.592411485" watchObservedRunningTime="2025-12-13 06:48:24.38104398 +0000 UTC m=+166.595994814" Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.406145 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33355e8a-5a62-49d4-8c71-ab546cdbf141" path="/var/lib/kubelet/pods/33355e8a-5a62-49d4-8c71-ab546cdbf141/volumes" Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.406849 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64" path="/var/lib/kubelet/pods/81fbb35d-bcd1-4c92-bc7d-2fcbd17c1d64/volumes" Dec 13 06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.417082 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" podStartSLOduration=8.417062810000001 podStartE2EDuration="8.41706281s" podCreationTimestamp="2025-12-13 06:48:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:48:24.40421081 +0000 UTC m=+166.619161643" watchObservedRunningTime="2025-12-13 06:48:24.41706281 +0000 UTC m=+166.632013643" Dec 13 
06:48:24 crc kubenswrapper[4644]: I1213 06:48:24.449816 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2bmvc" podStartSLOduration=2.253686465 podStartE2EDuration="25.449797012s" podCreationTimestamp="2025-12-13 06:47:59 +0000 UTC" firstStartedPulling="2025-12-13 06:48:00.764115243 +0000 UTC m=+142.979066076" lastFinishedPulling="2025-12-13 06:48:23.96022579 +0000 UTC m=+166.175176623" observedRunningTime="2025-12-13 06:48:24.449528016 +0000 UTC m=+166.664478850" watchObservedRunningTime="2025-12-13 06:48:24.449797012 +0000 UTC m=+166.664747845" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.281723 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-76whk" event={"ID":"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e","Type":"ContainerStarted","Data":"b159ba4d6272b401e619a08420e527c0433964b03a53534e281942b598f77596"} Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.284013 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqhfh" event={"ID":"e9452a17-eda0-4e66-bf4f-30e6e8ac8693","Type":"ContainerStarted","Data":"3a5ec60ffb3914f4bef7e7105fd5b5ad42f867ba66e2f5ec1dbe5b8dcd47f92f"} Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.301041 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-76whk" podStartSLOduration=2.529762184 podStartE2EDuration="22.30102389s" podCreationTimestamp="2025-12-13 06:48:03 +0000 UTC" firstStartedPulling="2025-12-13 06:48:05.031356878 +0000 UTC m=+147.246307711" lastFinishedPulling="2025-12-13 06:48:24.802618584 +0000 UTC m=+167.017569417" observedRunningTime="2025-12-13 06:48:25.297790499 +0000 UTC m=+167.512741333" watchObservedRunningTime="2025-12-13 06:48:25.30102389 +0000 UTC m=+167.515974722" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.313130 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gqhfh" podStartSLOduration=2.387200863 podStartE2EDuration="22.313111012s" podCreationTimestamp="2025-12-13 06:48:03 +0000 UTC" firstStartedPulling="2025-12-13 06:48:04.915174788 +0000 UTC m=+147.130125620" lastFinishedPulling="2025-12-13 06:48:24.841084936 +0000 UTC m=+167.056035769" observedRunningTime="2025-12-13 06:48:25.310729783 +0000 UTC m=+167.525680616" watchObservedRunningTime="2025-12-13 06:48:25.313111012 +0000 UTC m=+167.528061845" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.449942 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr"] Dec 13 06:48:25 crc kubenswrapper[4644]: E1213 06:48:25.450127 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33355e8a-5a62-49d4-8c71-ab546cdbf141" containerName="route-controller-manager" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.450139 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="33355e8a-5a62-49d4-8c71-ab546cdbf141" containerName="route-controller-manager" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.450236 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="33355e8a-5a62-49d4-8c71-ab546cdbf141" containerName="route-controller-manager" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.450589 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.453110 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.453209 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.454300 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.454777 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.454901 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.455348 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.461651 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr"] Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.601393 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8aae84d1-53df-46e4-9275-cff9d6af6a81-config\") pod \"route-controller-manager-7c588587d7-jn9zr\" (UID: \"8aae84d1-53df-46e4-9275-cff9d6af6a81\") " pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.601505 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8aae84d1-53df-46e4-9275-cff9d6af6a81-client-ca\") pod \"route-controller-manager-7c588587d7-jn9zr\" (UID: \"8aae84d1-53df-46e4-9275-cff9d6af6a81\") " pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.601549 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lslv\" (UniqueName: \"kubernetes.io/projected/8aae84d1-53df-46e4-9275-cff9d6af6a81-kube-api-access-7lslv\") pod \"route-controller-manager-7c588587d7-jn9zr\" (UID: \"8aae84d1-53df-46e4-9275-cff9d6af6a81\") " pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.601572 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8aae84d1-53df-46e4-9275-cff9d6af6a81-serving-cert\") pod \"route-controller-manager-7c588587d7-jn9zr\" (UID: \"8aae84d1-53df-46e4-9275-cff9d6af6a81\") " pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.703207 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8aae84d1-53df-46e4-9275-cff9d6af6a81-config\") pod 
\"route-controller-manager-7c588587d7-jn9zr\" (UID: \"8aae84d1-53df-46e4-9275-cff9d6af6a81\") " pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.703294 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8aae84d1-53df-46e4-9275-cff9d6af6a81-client-ca\") pod \"route-controller-manager-7c588587d7-jn9zr\" (UID: \"8aae84d1-53df-46e4-9275-cff9d6af6a81\") " pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.703339 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lslv\" (UniqueName: \"kubernetes.io/projected/8aae84d1-53df-46e4-9275-cff9d6af6a81-kube-api-access-7lslv\") pod \"route-controller-manager-7c588587d7-jn9zr\" (UID: \"8aae84d1-53df-46e4-9275-cff9d6af6a81\") " pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.703365 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8aae84d1-53df-46e4-9275-cff9d6af6a81-serving-cert\") pod \"route-controller-manager-7c588587d7-jn9zr\" (UID: \"8aae84d1-53df-46e4-9275-cff9d6af6a81\") " pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.704566 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8aae84d1-53df-46e4-9275-cff9d6af6a81-client-ca\") pod \"route-controller-manager-7c588587d7-jn9zr\" (UID: \"8aae84d1-53df-46e4-9275-cff9d6af6a81\") " pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.704709 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8aae84d1-53df-46e4-9275-cff9d6af6a81-config\") pod \"route-controller-manager-7c588587d7-jn9zr\" (UID: \"8aae84d1-53df-46e4-9275-cff9d6af6a81\") " pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.718662 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lslv\" (UniqueName: \"kubernetes.io/projected/8aae84d1-53df-46e4-9275-cff9d6af6a81-kube-api-access-7lslv\") pod \"route-controller-manager-7c588587d7-jn9zr\" (UID: \"8aae84d1-53df-46e4-9275-cff9d6af6a81\") " pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.721352 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8aae84d1-53df-46e4-9275-cff9d6af6a81-serving-cert\") pod \"route-controller-manager-7c588587d7-jn9zr\" (UID: \"8aae84d1-53df-46e4-9275-cff9d6af6a81\") " pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:25 crc kubenswrapper[4644]: I1213 06:48:25.762890 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:26 crc kubenswrapper[4644]: I1213 06:48:26.004739 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr"] Dec 13 06:48:26 crc kubenswrapper[4644]: I1213 06:48:26.290220 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" event={"ID":"8aae84d1-53df-46e4-9275-cff9d6af6a81","Type":"ContainerStarted","Data":"e7afef56a44260eff728a3e9d8f5ce2ac2a5132680d6cf3c85c27b9c147665dd"} Dec 13 06:48:26 crc kubenswrapper[4644]: I1213 06:48:26.290557 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" event={"ID":"8aae84d1-53df-46e4-9275-cff9d6af6a81","Type":"ContainerStarted","Data":"b33e654e6fda94e0cca3e58b8f9d80302f6719108fcc7b5c56232231a7039743"} Dec 13 06:48:26 crc kubenswrapper[4644]: I1213 06:48:26.305535 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" podStartSLOduration=10.305509165 podStartE2EDuration="10.305509165s" podCreationTimestamp="2025-12-13 06:48:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:48:26.304199994 +0000 UTC m=+168.519150827" watchObservedRunningTime="2025-12-13 06:48:26.305509165 +0000 UTC m=+168.520459998" Dec 13 06:48:27 crc kubenswrapper[4644]: I1213 06:48:27.294224 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:27 crc kubenswrapper[4644]: I1213 06:48:27.299422 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:30 crc kubenswrapper[4644]: I1213 06:48:30.300097 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2bmvc" Dec 13 06:48:30 crc kubenswrapper[4644]: I1213 06:48:30.300322 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2bmvc" Dec 13 06:48:30 crc kubenswrapper[4644]: I1213 06:48:30.372338 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2bmvc" Dec 13 06:48:30 crc kubenswrapper[4644]: I1213 06:48:30.419080 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2bmvc" Dec 13 06:48:30 crc kubenswrapper[4644]: I1213 06:48:30.506664 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-54mm5" Dec 13 06:48:30 crc kubenswrapper[4644]: I1213 06:48:30.506746 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-54mm5" Dec 13 06:48:30 crc kubenswrapper[4644]: I1213 06:48:30.536356 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-54mm5" Dec 13 06:48:30 crc kubenswrapper[4644]: I1213 06:48:30.704974 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/community-operators-g2gkk" Dec 13 06:48:30 crc kubenswrapper[4644]: I1213 06:48:30.705023 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-g2gkk" Dec 13 06:48:30 crc kubenswrapper[4644]: I1213 06:48:30.734742 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-g2gkk" Dec 13 06:48:30 crc kubenswrapper[4644]: I1213 06:48:30.942610 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-szmwp" Dec 13 06:48:30 crc kubenswrapper[4644]: I1213 06:48:30.943438 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-szmwp" Dec 13 06:48:30 crc kubenswrapper[4644]: I1213 06:48:30.973724 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-szmwp" Dec 13 06:48:31 crc kubenswrapper[4644]: I1213 06:48:31.346370 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-szmwp" Dec 13 06:48:31 crc kubenswrapper[4644]: I1213 06:48:31.346968 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-54mm5" Dec 13 06:48:31 crc kubenswrapper[4644]: I1213 06:48:31.348453 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-g2gkk" Dec 13 06:48:32 crc kubenswrapper[4644]: I1213 06:48:32.202862 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g2gkk"] Dec 13 06:48:32 crc kubenswrapper[4644]: I1213 06:48:32.305840 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pngbm" Dec 13 06:48:32 crc kubenswrapper[4644]: I1213 06:48:32.305893 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pngbm" Dec 13 06:48:32 crc kubenswrapper[4644]: I1213 06:48:32.334955 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pngbm" Dec 13 06:48:32 crc kubenswrapper[4644]: I1213 06:48:32.703270 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-27zlm" Dec 13 06:48:32 crc kubenswrapper[4644]: I1213 06:48:32.703343 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-27zlm" Dec 13 06:48:32 crc kubenswrapper[4644]: I1213 06:48:32.732473 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-27zlm" Dec 13 06:48:32 crc kubenswrapper[4644]: I1213 06:48:32.805322 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-szmwp"] Dec 13 06:48:33 crc kubenswrapper[4644]: I1213 06:48:33.328008 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-g2gkk" podUID="a642cd12-49f1-4784-86ce-05f0291c9049" containerName="registry-server" containerID="cri-o://cc051242d1d848310a2e6a6f724974f1f8e9696e031e5b2ddd8d66b43f6a394b" gracePeriod=2 Dec 13 06:48:33 crc kubenswrapper[4644]: I1213 06:48:33.360339 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/redhat-marketplace-pngbm" Dec 13 06:48:33 crc kubenswrapper[4644]: I1213 06:48:33.360416 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-27zlm" Dec 13 06:48:33 crc kubenswrapper[4644]: I1213 06:48:33.415859 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 06:48:33 crc kubenswrapper[4644]: I1213 06:48:33.613035 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gqhfh" Dec 13 06:48:33 crc kubenswrapper[4644]: I1213 06:48:33.613100 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gqhfh" Dec 13 06:48:33 crc kubenswrapper[4644]: I1213 06:48:33.650250 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gqhfh" Dec 13 06:48:33 crc kubenswrapper[4644]: I1213 06:48:33.740981 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g2gkk" Dec 13 06:48:33 crc kubenswrapper[4644]: I1213 06:48:33.914293 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-76whk" Dec 13 06:48:33 crc kubenswrapper[4644]: I1213 06:48:33.914372 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-76whk" Dec 13 06:48:33 crc kubenswrapper[4644]: I1213 06:48:33.921273 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a642cd12-49f1-4784-86ce-05f0291c9049-catalog-content\") pod \"a642cd12-49f1-4784-86ce-05f0291c9049\" (UID: \"a642cd12-49f1-4784-86ce-05f0291c9049\") " Dec 13 06:48:33 crc kubenswrapper[4644]: I1213 06:48:33.921364 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a642cd12-49f1-4784-86ce-05f0291c9049-utilities\") pod \"a642cd12-49f1-4784-86ce-05f0291c9049\" (UID: \"a642cd12-49f1-4784-86ce-05f0291c9049\") " Dec 13 06:48:33 crc kubenswrapper[4644]: I1213 06:48:33.921500 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvvgd\" (UniqueName: \"kubernetes.io/projected/a642cd12-49f1-4784-86ce-05f0291c9049-kube-api-access-kvvgd\") pod \"a642cd12-49f1-4784-86ce-05f0291c9049\" (UID: \"a642cd12-49f1-4784-86ce-05f0291c9049\") " Dec 13 06:48:33 crc kubenswrapper[4644]: I1213 06:48:33.922062 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a642cd12-49f1-4784-86ce-05f0291c9049-utilities" (OuterVolumeSpecName: "utilities") pod "a642cd12-49f1-4784-86ce-05f0291c9049" (UID: "a642cd12-49f1-4784-86ce-05f0291c9049"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:48:33 crc kubenswrapper[4644]: I1213 06:48:33.927834 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a642cd12-49f1-4784-86ce-05f0291c9049-kube-api-access-kvvgd" (OuterVolumeSpecName: "kube-api-access-kvvgd") pod "a642cd12-49f1-4784-86ce-05f0291c9049" (UID: "a642cd12-49f1-4784-86ce-05f0291c9049"). InnerVolumeSpecName "kube-api-access-kvvgd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:48:33 crc kubenswrapper[4644]: I1213 06:48:33.945999 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-76whk" Dec 13 06:48:33 crc kubenswrapper[4644]: I1213 06:48:33.960994 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a642cd12-49f1-4784-86ce-05f0291c9049-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a642cd12-49f1-4784-86ce-05f0291c9049" (UID: "a642cd12-49f1-4784-86ce-05f0291c9049"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.023891 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvvgd\" (UniqueName: \"kubernetes.io/projected/a642cd12-49f1-4784-86ce-05f0291c9049-kube-api-access-kvvgd\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.023927 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a642cd12-49f1-4784-86ce-05f0291c9049-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.023937 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a642cd12-49f1-4784-86ce-05f0291c9049-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.177967 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6kp77" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.334646 4644 generic.go:334] "Generic (PLEG): container finished" podID="a642cd12-49f1-4784-86ce-05f0291c9049" containerID="cc051242d1d848310a2e6a6f724974f1f8e9696e031e5b2ddd8d66b43f6a394b" exitCode=0 Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.334703 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g2gkk" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.334762 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2gkk" event={"ID":"a642cd12-49f1-4784-86ce-05f0291c9049","Type":"ContainerDied","Data":"cc051242d1d848310a2e6a6f724974f1f8e9696e031e5b2ddd8d66b43f6a394b"} Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.334807 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2gkk" event={"ID":"a642cd12-49f1-4784-86ce-05f0291c9049","Type":"ContainerDied","Data":"fc5338834e455a6613b7666d3c3a3f92171ed04711a287d83167524be8979b49"} Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.334828 4644 scope.go:117] "RemoveContainer" containerID="cc051242d1d848310a2e6a6f724974f1f8e9696e031e5b2ddd8d66b43f6a394b" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.336427 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-szmwp" podUID="b8b426e8-8b8b-4168-afaa-a5cbb6752fdb" containerName="registry-server" containerID="cri-o://bc4cf461cf4903c7cbc065ecb98e341b12988233689de7f277aa554bbc352d6d" gracePeriod=2 Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.356131 4644 scope.go:117] "RemoveContainer" containerID="4deeb98aea42e1c5836ce3ef9bac861cb27ba24c851389e81b512913738e274c" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.358405 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g2gkk"] Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.361614 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-g2gkk"] Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.370534 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gqhfh" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.371183 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-76whk" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.407721 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a642cd12-49f1-4784-86ce-05f0291c9049" path="/var/lib/kubelet/pods/a642cd12-49f1-4784-86ce-05f0291c9049/volumes" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.432068 4644 scope.go:117] "RemoveContainer" containerID="58b1d85a2528df4ee4f085b910fb2cf4802a703a33ddab1f9c58a125290710d7" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.451100 4644 scope.go:117] "RemoveContainer" containerID="cc051242d1d848310a2e6a6f724974f1f8e9696e031e5b2ddd8d66b43f6a394b" Dec 13 06:48:34 crc kubenswrapper[4644]: E1213 06:48:34.451797 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc051242d1d848310a2e6a6f724974f1f8e9696e031e5b2ddd8d66b43f6a394b\": container with ID starting with cc051242d1d848310a2e6a6f724974f1f8e9696e031e5b2ddd8d66b43f6a394b not found: ID does not exist" containerID="cc051242d1d848310a2e6a6f724974f1f8e9696e031e5b2ddd8d66b43f6a394b" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.451849 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc051242d1d848310a2e6a6f724974f1f8e9696e031e5b2ddd8d66b43f6a394b"} err="failed to get container status 
\"cc051242d1d848310a2e6a6f724974f1f8e9696e031e5b2ddd8d66b43f6a394b\": rpc error: code = NotFound desc = could not find container \"cc051242d1d848310a2e6a6f724974f1f8e9696e031e5b2ddd8d66b43f6a394b\": container with ID starting with cc051242d1d848310a2e6a6f724974f1f8e9696e031e5b2ddd8d66b43f6a394b not found: ID does not exist" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.451908 4644 scope.go:117] "RemoveContainer" containerID="4deeb98aea42e1c5836ce3ef9bac861cb27ba24c851389e81b512913738e274c" Dec 13 06:48:34 crc kubenswrapper[4644]: E1213 06:48:34.452231 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4deeb98aea42e1c5836ce3ef9bac861cb27ba24c851389e81b512913738e274c\": container with ID starting with 4deeb98aea42e1c5836ce3ef9bac861cb27ba24c851389e81b512913738e274c not found: ID does not exist" containerID="4deeb98aea42e1c5836ce3ef9bac861cb27ba24c851389e81b512913738e274c" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.452259 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4deeb98aea42e1c5836ce3ef9bac861cb27ba24c851389e81b512913738e274c"} err="failed to get container status \"4deeb98aea42e1c5836ce3ef9bac861cb27ba24c851389e81b512913738e274c\": rpc error: code = NotFound desc = could not find container \"4deeb98aea42e1c5836ce3ef9bac861cb27ba24c851389e81b512913738e274c\": container with ID starting with 4deeb98aea42e1c5836ce3ef9bac861cb27ba24c851389e81b512913738e274c not found: ID does not exist" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.452278 4644 scope.go:117] "RemoveContainer" containerID="58b1d85a2528df4ee4f085b910fb2cf4802a703a33ddab1f9c58a125290710d7" Dec 13 06:48:34 crc kubenswrapper[4644]: E1213 06:48:34.452483 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58b1d85a2528df4ee4f085b910fb2cf4802a703a33ddab1f9c58a125290710d7\": container with ID starting with 58b1d85a2528df4ee4f085b910fb2cf4802a703a33ddab1f9c58a125290710d7 not found: ID does not exist" containerID="58b1d85a2528df4ee4f085b910fb2cf4802a703a33ddab1f9c58a125290710d7" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.452502 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58b1d85a2528df4ee4f085b910fb2cf4802a703a33ddab1f9c58a125290710d7"} err="failed to get container status \"58b1d85a2528df4ee4f085b910fb2cf4802a703a33ddab1f9c58a125290710d7\": rpc error: code = NotFound desc = could not find container \"58b1d85a2528df4ee4f085b910fb2cf4802a703a33ddab1f9c58a125290710d7\": container with ID starting with 58b1d85a2528df4ee4f085b910fb2cf4802a703a33ddab1f9c58a125290710d7 not found: ID does not exist" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.712636 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-szmwp" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.839580 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-catalog-content\") pod \"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb\" (UID: \"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb\") " Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.839880 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lfwl\" (UniqueName: \"kubernetes.io/projected/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-kube-api-access-5lfwl\") pod \"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb\" (UID: \"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb\") " Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.839955 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-utilities\") pod \"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb\" (UID: \"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb\") " Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.840828 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-utilities" (OuterVolumeSpecName: "utilities") pod "b8b426e8-8b8b-4168-afaa-a5cbb6752fdb" (UID: "b8b426e8-8b8b-4168-afaa-a5cbb6752fdb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.843479 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-kube-api-access-5lfwl" (OuterVolumeSpecName: "kube-api-access-5lfwl") pod "b8b426e8-8b8b-4168-afaa-a5cbb6752fdb" (UID: "b8b426e8-8b8b-4168-afaa-a5cbb6752fdb"). InnerVolumeSpecName "kube-api-access-5lfwl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.878212 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b8b426e8-8b8b-4168-afaa-a5cbb6752fdb" (UID: "b8b426e8-8b8b-4168-afaa-a5cbb6752fdb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.941586 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.941631 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:34 crc kubenswrapper[4644]: I1213 06:48:34.941647 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lfwl\" (UniqueName: \"kubernetes.io/projected/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb-kube-api-access-5lfwl\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.202076 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-27zlm"] Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.354847 4644 generic.go:334] "Generic (PLEG): container finished" podID="b8b426e8-8b8b-4168-afaa-a5cbb6752fdb" containerID="bc4cf461cf4903c7cbc065ecb98e341b12988233689de7f277aa554bbc352d6d" exitCode=0 Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.354903 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-szmwp" event={"ID":"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb","Type":"ContainerDied","Data":"bc4cf461cf4903c7cbc065ecb98e341b12988233689de7f277aa554bbc352d6d"} Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.354969 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-szmwp" event={"ID":"b8b426e8-8b8b-4168-afaa-a5cbb6752fdb","Type":"ContainerDied","Data":"0832af65481d4130cf0a515ccd58449dcb0ae02db5ad4914a6987bfc1e4e38ce"} Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.354991 4644 scope.go:117] "RemoveContainer" containerID="bc4cf461cf4903c7cbc065ecb98e341b12988233689de7f277aa554bbc352d6d" Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.354987 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-szmwp" Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.355074 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-27zlm" podUID="946372f5-3102-4e5c-b2af-03057f3b92dd" containerName="registry-server" containerID="cri-o://b79fcb7a30c6e24a2b2e5b948ef673d7d0705c03982d40f672399445a551d3b2" gracePeriod=2 Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.376055 4644 scope.go:117] "RemoveContainer" containerID="73118887678da774cbb6c2837545329f6563f7ccdf752d3d174007b70e957bde" Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.377545 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-szmwp"] Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.383544 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-szmwp"] Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.426830 4644 scope.go:117] "RemoveContainer" containerID="8fab03e287540465f8b81676df1426c08c76024946b77970fff87acdf4366a3c" Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.438500 4644 scope.go:117] "RemoveContainer" containerID="bc4cf461cf4903c7cbc065ecb98e341b12988233689de7f277aa554bbc352d6d" Dec 13 06:48:35 crc kubenswrapper[4644]: E1213 06:48:35.439084 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc4cf461cf4903c7cbc065ecb98e341b12988233689de7f277aa554bbc352d6d\": container with ID starting with bc4cf461cf4903c7cbc065ecb98e341b12988233689de7f277aa554bbc352d6d not found: ID does not exist" containerID="bc4cf461cf4903c7cbc065ecb98e341b12988233689de7f277aa554bbc352d6d" Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.439125 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc4cf461cf4903c7cbc065ecb98e341b12988233689de7f277aa554bbc352d6d"} err="failed to get container status \"bc4cf461cf4903c7cbc065ecb98e341b12988233689de7f277aa554bbc352d6d\": rpc error: code = NotFound desc = could not find container \"bc4cf461cf4903c7cbc065ecb98e341b12988233689de7f277aa554bbc352d6d\": container with ID starting with bc4cf461cf4903c7cbc065ecb98e341b12988233689de7f277aa554bbc352d6d not found: ID does not exist" Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.439171 4644 scope.go:117] "RemoveContainer" containerID="73118887678da774cbb6c2837545329f6563f7ccdf752d3d174007b70e957bde" Dec 13 06:48:35 crc kubenswrapper[4644]: E1213 06:48:35.439521 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73118887678da774cbb6c2837545329f6563f7ccdf752d3d174007b70e957bde\": container with ID starting with 73118887678da774cbb6c2837545329f6563f7ccdf752d3d174007b70e957bde not found: ID does not exist" containerID="73118887678da774cbb6c2837545329f6563f7ccdf752d3d174007b70e957bde" Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.439550 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73118887678da774cbb6c2837545329f6563f7ccdf752d3d174007b70e957bde"} err="failed to get container status \"73118887678da774cbb6c2837545329f6563f7ccdf752d3d174007b70e957bde\": rpc error: code = NotFound desc = could not find container \"73118887678da774cbb6c2837545329f6563f7ccdf752d3d174007b70e957bde\": container with ID starting with 
73118887678da774cbb6c2837545329f6563f7ccdf752d3d174007b70e957bde not found: ID does not exist" Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.439591 4644 scope.go:117] "RemoveContainer" containerID="8fab03e287540465f8b81676df1426c08c76024946b77970fff87acdf4366a3c" Dec 13 06:48:35 crc kubenswrapper[4644]: E1213 06:48:35.439817 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fab03e287540465f8b81676df1426c08c76024946b77970fff87acdf4366a3c\": container with ID starting with 8fab03e287540465f8b81676df1426c08c76024946b77970fff87acdf4366a3c not found: ID does not exist" containerID="8fab03e287540465f8b81676df1426c08c76024946b77970fff87acdf4366a3c" Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.439845 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fab03e287540465f8b81676df1426c08c76024946b77970fff87acdf4366a3c"} err="failed to get container status \"8fab03e287540465f8b81676df1426c08c76024946b77970fff87acdf4366a3c\": rpc error: code = NotFound desc = could not find container \"8fab03e287540465f8b81676df1426c08c76024946b77970fff87acdf4366a3c\": container with ID starting with 8fab03e287540465f8b81676df1426c08c76024946b77970fff87acdf4366a3c not found: ID does not exist" Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.768376 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-27zlm" Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.953452 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7tg8t\" (UniqueName: \"kubernetes.io/projected/946372f5-3102-4e5c-b2af-03057f3b92dd-kube-api-access-7tg8t\") pod \"946372f5-3102-4e5c-b2af-03057f3b92dd\" (UID: \"946372f5-3102-4e5c-b2af-03057f3b92dd\") " Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.953523 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/946372f5-3102-4e5c-b2af-03057f3b92dd-catalog-content\") pod \"946372f5-3102-4e5c-b2af-03057f3b92dd\" (UID: \"946372f5-3102-4e5c-b2af-03057f3b92dd\") " Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.953551 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/946372f5-3102-4e5c-b2af-03057f3b92dd-utilities\") pod \"946372f5-3102-4e5c-b2af-03057f3b92dd\" (UID: \"946372f5-3102-4e5c-b2af-03057f3b92dd\") " Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.954175 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/946372f5-3102-4e5c-b2af-03057f3b92dd-utilities" (OuterVolumeSpecName: "utilities") pod "946372f5-3102-4e5c-b2af-03057f3b92dd" (UID: "946372f5-3102-4e5c-b2af-03057f3b92dd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.956200 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/946372f5-3102-4e5c-b2af-03057f3b92dd-kube-api-access-7tg8t" (OuterVolumeSpecName: "kube-api-access-7tg8t") pod "946372f5-3102-4e5c-b2af-03057f3b92dd" (UID: "946372f5-3102-4e5c-b2af-03057f3b92dd"). InnerVolumeSpecName "kube-api-access-7tg8t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:48:35 crc kubenswrapper[4644]: I1213 06:48:35.968532 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/946372f5-3102-4e5c-b2af-03057f3b92dd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "946372f5-3102-4e5c-b2af-03057f3b92dd" (UID: "946372f5-3102-4e5c-b2af-03057f3b92dd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.055692 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/946372f5-3102-4e5c-b2af-03057f3b92dd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.055730 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/946372f5-3102-4e5c-b2af-03057f3b92dd-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.055742 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7tg8t\" (UniqueName: \"kubernetes.io/projected/946372f5-3102-4e5c-b2af-03057f3b92dd-kube-api-access-7tg8t\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.363169 4644 generic.go:334] "Generic (PLEG): container finished" podID="946372f5-3102-4e5c-b2af-03057f3b92dd" containerID="b79fcb7a30c6e24a2b2e5b948ef673d7d0705c03982d40f672399445a551d3b2" exitCode=0 Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.363230 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27zlm" event={"ID":"946372f5-3102-4e5c-b2af-03057f3b92dd","Type":"ContainerDied","Data":"b79fcb7a30c6e24a2b2e5b948ef673d7d0705c03982d40f672399445a551d3b2"} Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.363257 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27zlm" event={"ID":"946372f5-3102-4e5c-b2af-03057f3b92dd","Type":"ContainerDied","Data":"ab875e1b89043f085d5eeef3cd12d49b154092c444c0c5f27520961a985d6dce"} Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.363275 4644 scope.go:117] "RemoveContainer" containerID="b79fcb7a30c6e24a2b2e5b948ef673d7d0705c03982d40f672399445a551d3b2" Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.363827 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-27zlm" Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.375652 4644 scope.go:117] "RemoveContainer" containerID="99573b75f1c5839caa15e2425ecc5fd50aeede653a85f18920bfd1b8813a891d" Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.384778 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-27zlm"] Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.387934 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-27zlm"] Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.392567 4644 scope.go:117] "RemoveContainer" containerID="b751b472d8750e81413ff965ddbceb991003acaa076137d13ddd018b0ec99fe3" Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.394412 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="946372f5-3102-4e5c-b2af-03057f3b92dd" path="/var/lib/kubelet/pods/946372f5-3102-4e5c-b2af-03057f3b92dd/volumes" Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.395041 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8b426e8-8b8b-4168-afaa-a5cbb6752fdb" path="/var/lib/kubelet/pods/b8b426e8-8b8b-4168-afaa-a5cbb6752fdb/volumes" Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.417103 4644 scope.go:117] "RemoveContainer" containerID="b79fcb7a30c6e24a2b2e5b948ef673d7d0705c03982d40f672399445a551d3b2" Dec 13 06:48:36 crc kubenswrapper[4644]: E1213 06:48:36.417496 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b79fcb7a30c6e24a2b2e5b948ef673d7d0705c03982d40f672399445a551d3b2\": container with ID starting with b79fcb7a30c6e24a2b2e5b948ef673d7d0705c03982d40f672399445a551d3b2 not found: ID does not exist" containerID="b79fcb7a30c6e24a2b2e5b948ef673d7d0705c03982d40f672399445a551d3b2" Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.417543 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b79fcb7a30c6e24a2b2e5b948ef673d7d0705c03982d40f672399445a551d3b2"} err="failed to get container status \"b79fcb7a30c6e24a2b2e5b948ef673d7d0705c03982d40f672399445a551d3b2\": rpc error: code = NotFound desc = could not find container \"b79fcb7a30c6e24a2b2e5b948ef673d7d0705c03982d40f672399445a551d3b2\": container with ID starting with b79fcb7a30c6e24a2b2e5b948ef673d7d0705c03982d40f672399445a551d3b2 not found: ID does not exist" Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.417573 4644 scope.go:117] "RemoveContainer" containerID="99573b75f1c5839caa15e2425ecc5fd50aeede653a85f18920bfd1b8813a891d" Dec 13 06:48:36 crc kubenswrapper[4644]: E1213 06:48:36.418259 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99573b75f1c5839caa15e2425ecc5fd50aeede653a85f18920bfd1b8813a891d\": container with ID starting with 99573b75f1c5839caa15e2425ecc5fd50aeede653a85f18920bfd1b8813a891d not found: ID does not exist" containerID="99573b75f1c5839caa15e2425ecc5fd50aeede653a85f18920bfd1b8813a891d" Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.418288 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99573b75f1c5839caa15e2425ecc5fd50aeede653a85f18920bfd1b8813a891d"} err="failed to get container status \"99573b75f1c5839caa15e2425ecc5fd50aeede653a85f18920bfd1b8813a891d\": rpc error: code = NotFound desc = could not find 
container \"99573b75f1c5839caa15e2425ecc5fd50aeede653a85f18920bfd1b8813a891d\": container with ID starting with 99573b75f1c5839caa15e2425ecc5fd50aeede653a85f18920bfd1b8813a891d not found: ID does not exist" Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.418310 4644 scope.go:117] "RemoveContainer" containerID="b751b472d8750e81413ff965ddbceb991003acaa076137d13ddd018b0ec99fe3" Dec 13 06:48:36 crc kubenswrapper[4644]: E1213 06:48:36.418989 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b751b472d8750e81413ff965ddbceb991003acaa076137d13ddd018b0ec99fe3\": container with ID starting with b751b472d8750e81413ff965ddbceb991003acaa076137d13ddd018b0ec99fe3 not found: ID does not exist" containerID="b751b472d8750e81413ff965ddbceb991003acaa076137d13ddd018b0ec99fe3" Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.419016 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b751b472d8750e81413ff965ddbceb991003acaa076137d13ddd018b0ec99fe3"} err="failed to get container status \"b751b472d8750e81413ff965ddbceb991003acaa076137d13ddd018b0ec99fe3\": rpc error: code = NotFound desc = could not find container \"b751b472d8750e81413ff965ddbceb991003acaa076137d13ddd018b0ec99fe3\": container with ID starting with b751b472d8750e81413ff965ddbceb991003acaa076137d13ddd018b0ec99fe3 not found: ID does not exist" Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.636721 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-78bc4cb977-c5l6c"] Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.636981 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" podUID="079619c8-cbc6-485e-a90d-72b905d379b0" containerName="controller-manager" containerID="cri-o://d75d3f21936747976edca09dcc6db919a9a1b1f39f3f02ed114243e2e0255416" gracePeriod=30 Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.732044 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr"] Dec 13 06:48:36 crc kubenswrapper[4644]: I1213 06:48:36.732251 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" podUID="8aae84d1-53df-46e4-9275-cff9d6af6a81" containerName="route-controller-manager" containerID="cri-o://e7afef56a44260eff728a3e9d8f5ce2ac2a5132680d6cf3c85c27b9c147665dd" gracePeriod=30 Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.102716 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.108951 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.184092 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-proxy-ca-bundles\") pod \"079619c8-cbc6-485e-a90d-72b905d379b0\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.184161 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7h7b\" (UniqueName: \"kubernetes.io/projected/079619c8-cbc6-485e-a90d-72b905d379b0-kube-api-access-t7h7b\") pod \"079619c8-cbc6-485e-a90d-72b905d379b0\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.184199 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8aae84d1-53df-46e4-9275-cff9d6af6a81-config\") pod \"8aae84d1-53df-46e4-9275-cff9d6af6a81\" (UID: \"8aae84d1-53df-46e4-9275-cff9d6af6a81\") " Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.184285 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8aae84d1-53df-46e4-9275-cff9d6af6a81-client-ca\") pod \"8aae84d1-53df-46e4-9275-cff9d6af6a81\" (UID: \"8aae84d1-53df-46e4-9275-cff9d6af6a81\") " Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.184323 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lslv\" (UniqueName: \"kubernetes.io/projected/8aae84d1-53df-46e4-9275-cff9d6af6a81-kube-api-access-7lslv\") pod \"8aae84d1-53df-46e4-9275-cff9d6af6a81\" (UID: \"8aae84d1-53df-46e4-9275-cff9d6af6a81\") " Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.184357 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-config\") pod \"079619c8-cbc6-485e-a90d-72b905d379b0\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.184374 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-client-ca\") pod \"079619c8-cbc6-485e-a90d-72b905d379b0\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.184394 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8aae84d1-53df-46e4-9275-cff9d6af6a81-serving-cert\") pod \"8aae84d1-53df-46e4-9275-cff9d6af6a81\" (UID: \"8aae84d1-53df-46e4-9275-cff9d6af6a81\") " Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.184418 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/079619c8-cbc6-485e-a90d-72b905d379b0-serving-cert\") pod \"079619c8-cbc6-485e-a90d-72b905d379b0\" (UID: \"079619c8-cbc6-485e-a90d-72b905d379b0\") " Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.184786 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "079619c8-cbc6-485e-a90d-72b905d379b0" 
(UID: "079619c8-cbc6-485e-a90d-72b905d379b0"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.184970 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-config" (OuterVolumeSpecName: "config") pod "079619c8-cbc6-485e-a90d-72b905d379b0" (UID: "079619c8-cbc6-485e-a90d-72b905d379b0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.185018 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-client-ca" (OuterVolumeSpecName: "client-ca") pod "079619c8-cbc6-485e-a90d-72b905d379b0" (UID: "079619c8-cbc6-485e-a90d-72b905d379b0"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.185035 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8aae84d1-53df-46e4-9275-cff9d6af6a81-config" (OuterVolumeSpecName: "config") pod "8aae84d1-53df-46e4-9275-cff9d6af6a81" (UID: "8aae84d1-53df-46e4-9275-cff9d6af6a81"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.185326 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8aae84d1-53df-46e4-9275-cff9d6af6a81-client-ca" (OuterVolumeSpecName: "client-ca") pod "8aae84d1-53df-46e4-9275-cff9d6af6a81" (UID: "8aae84d1-53df-46e4-9275-cff9d6af6a81"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.188833 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8aae84d1-53df-46e4-9275-cff9d6af6a81-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8aae84d1-53df-46e4-9275-cff9d6af6a81" (UID: "8aae84d1-53df-46e4-9275-cff9d6af6a81"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.189662 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8aae84d1-53df-46e4-9275-cff9d6af6a81-kube-api-access-7lslv" (OuterVolumeSpecName: "kube-api-access-7lslv") pod "8aae84d1-53df-46e4-9275-cff9d6af6a81" (UID: "8aae84d1-53df-46e4-9275-cff9d6af6a81"). InnerVolumeSpecName "kube-api-access-7lslv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.189753 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/079619c8-cbc6-485e-a90d-72b905d379b0-kube-api-access-t7h7b" (OuterVolumeSpecName: "kube-api-access-t7h7b") pod "079619c8-cbc6-485e-a90d-72b905d379b0" (UID: "079619c8-cbc6-485e-a90d-72b905d379b0"). InnerVolumeSpecName "kube-api-access-t7h7b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.190047 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/079619c8-cbc6-485e-a90d-72b905d379b0-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "079619c8-cbc6-485e-a90d-72b905d379b0" (UID: "079619c8-cbc6-485e-a90d-72b905d379b0"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.286148 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/079619c8-cbc6-485e-a90d-72b905d379b0-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.286184 4644 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.286198 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7h7b\" (UniqueName: \"kubernetes.io/projected/079619c8-cbc6-485e-a90d-72b905d379b0-kube-api-access-t7h7b\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.286210 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8aae84d1-53df-46e4-9275-cff9d6af6a81-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.286217 4644 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8aae84d1-53df-46e4-9275-cff9d6af6a81-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.286226 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lslv\" (UniqueName: \"kubernetes.io/projected/8aae84d1-53df-46e4-9275-cff9d6af6a81-kube-api-access-7lslv\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.286234 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.286241 4644 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/079619c8-cbc6-485e-a90d-72b905d379b0-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.286248 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8aae84d1-53df-46e4-9275-cff9d6af6a81-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.371860 4644 generic.go:334] "Generic (PLEG): container finished" podID="8aae84d1-53df-46e4-9275-cff9d6af6a81" containerID="e7afef56a44260eff728a3e9d8f5ce2ac2a5132680d6cf3c85c27b9c147665dd" exitCode=0 Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.371925 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" event={"ID":"8aae84d1-53df-46e4-9275-cff9d6af6a81","Type":"ContainerDied","Data":"e7afef56a44260eff728a3e9d8f5ce2ac2a5132680d6cf3c85c27b9c147665dd"} Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.371951 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" event={"ID":"8aae84d1-53df-46e4-9275-cff9d6af6a81","Type":"ContainerDied","Data":"b33e654e6fda94e0cca3e58b8f9d80302f6719108fcc7b5c56232231a7039743"} Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.371968 4644 scope.go:117] "RemoveContainer" 
containerID="e7afef56a44260eff728a3e9d8f5ce2ac2a5132680d6cf3c85c27b9c147665dd" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.372035 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.374869 4644 generic.go:334] "Generic (PLEG): container finished" podID="079619c8-cbc6-485e-a90d-72b905d379b0" containerID="d75d3f21936747976edca09dcc6db919a9a1b1f39f3f02ed114243e2e0255416" exitCode=0 Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.374936 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" event={"ID":"079619c8-cbc6-485e-a90d-72b905d379b0","Type":"ContainerDied","Data":"d75d3f21936747976edca09dcc6db919a9a1b1f39f3f02ed114243e2e0255416"} Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.374969 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" event={"ID":"079619c8-cbc6-485e-a90d-72b905d379b0","Type":"ContainerDied","Data":"6fe061a49ce09552d141e0684405e6eca0fcef4eb05927f69dd6aaba9828c53f"} Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.375098 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-78bc4cb977-c5l6c" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.394278 4644 scope.go:117] "RemoveContainer" containerID="e7afef56a44260eff728a3e9d8f5ce2ac2a5132680d6cf3c85c27b9c147665dd" Dec 13 06:48:37 crc kubenswrapper[4644]: E1213 06:48:37.396302 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7afef56a44260eff728a3e9d8f5ce2ac2a5132680d6cf3c85c27b9c147665dd\": container with ID starting with e7afef56a44260eff728a3e9d8f5ce2ac2a5132680d6cf3c85c27b9c147665dd not found: ID does not exist" containerID="e7afef56a44260eff728a3e9d8f5ce2ac2a5132680d6cf3c85c27b9c147665dd" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.396352 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7afef56a44260eff728a3e9d8f5ce2ac2a5132680d6cf3c85c27b9c147665dd"} err="failed to get container status \"e7afef56a44260eff728a3e9d8f5ce2ac2a5132680d6cf3c85c27b9c147665dd\": rpc error: code = NotFound desc = could not find container \"e7afef56a44260eff728a3e9d8f5ce2ac2a5132680d6cf3c85c27b9c147665dd\": container with ID starting with e7afef56a44260eff728a3e9d8f5ce2ac2a5132680d6cf3c85c27b9c147665dd not found: ID does not exist" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.396375 4644 scope.go:117] "RemoveContainer" containerID="d75d3f21936747976edca09dcc6db919a9a1b1f39f3f02ed114243e2e0255416" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.397178 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr"] Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.398884 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7c588587d7-jn9zr"] Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.403715 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-78bc4cb977-c5l6c"] Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.406162 4644 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-78bc4cb977-c5l6c"] Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.409803 4644 scope.go:117] "RemoveContainer" containerID="d75d3f21936747976edca09dcc6db919a9a1b1f39f3f02ed114243e2e0255416" Dec 13 06:48:37 crc kubenswrapper[4644]: E1213 06:48:37.410179 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d75d3f21936747976edca09dcc6db919a9a1b1f39f3f02ed114243e2e0255416\": container with ID starting with d75d3f21936747976edca09dcc6db919a9a1b1f39f3f02ed114243e2e0255416 not found: ID does not exist" containerID="d75d3f21936747976edca09dcc6db919a9a1b1f39f3f02ed114243e2e0255416" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.410205 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d75d3f21936747976edca09dcc6db919a9a1b1f39f3f02ed114243e2e0255416"} err="failed to get container status \"d75d3f21936747976edca09dcc6db919a9a1b1f39f3f02ed114243e2e0255416\": rpc error: code = NotFound desc = could not find container \"d75d3f21936747976edca09dcc6db919a9a1b1f39f3f02ed114243e2e0255416\": container with ID starting with d75d3f21936747976edca09dcc6db919a9a1b1f39f3f02ed114243e2e0255416 not found: ID does not exist" Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.614931 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-76whk"] Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.615138 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-76whk" podUID="1e3eddf5-6aec-4c15-8ae7-a7258d60be4e" containerName="registry-server" containerID="cri-o://b159ba4d6272b401e619a08420e527c0433964b03a53534e281942b598f77596" gracePeriod=2 Dec 13 06:48:37 crc kubenswrapper[4644]: I1213 06:48:37.923889 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-76whk" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.008895 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-catalog-content\") pod \"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e\" (UID: \"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e\") " Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.008990 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-utilities\") pod \"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e\" (UID: \"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e\") " Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.009062 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c47q5\" (UniqueName: \"kubernetes.io/projected/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-kube-api-access-c47q5\") pod \"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e\" (UID: \"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e\") " Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.010021 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-utilities" (OuterVolumeSpecName: "utilities") pod "1e3eddf5-6aec-4c15-8ae7-a7258d60be4e" (UID: "1e3eddf5-6aec-4c15-8ae7-a7258d60be4e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.012430 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-kube-api-access-c47q5" (OuterVolumeSpecName: "kube-api-access-c47q5") pod "1e3eddf5-6aec-4c15-8ae7-a7258d60be4e" (UID: "1e3eddf5-6aec-4c15-8ae7-a7258d60be4e"). InnerVolumeSpecName "kube-api-access-c47q5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.097250 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1e3eddf5-6aec-4c15-8ae7-a7258d60be4e" (UID: "1e3eddf5-6aec-4c15-8ae7-a7258d60be4e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.111974 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c47q5\" (UniqueName: \"kubernetes.io/projected/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-kube-api-access-c47q5\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.112022 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.112036 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.396902 4644 generic.go:334] "Generic (PLEG): container finished" podID="1e3eddf5-6aec-4c15-8ae7-a7258d60be4e" containerID="b159ba4d6272b401e619a08420e527c0433964b03a53534e281942b598f77596" exitCode=0 Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.397008 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-76whk" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.397016 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="079619c8-cbc6-485e-a90d-72b905d379b0" path="/var/lib/kubelet/pods/079619c8-cbc6-485e-a90d-72b905d379b0/volumes" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.397964 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8aae84d1-53df-46e4-9275-cff9d6af6a81" path="/var/lib/kubelet/pods/8aae84d1-53df-46e4-9275-cff9d6af6a81/volumes" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.398472 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-76whk" event={"ID":"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e","Type":"ContainerDied","Data":"b159ba4d6272b401e619a08420e527c0433964b03a53534e281942b598f77596"} Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.398501 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-76whk" event={"ID":"1e3eddf5-6aec-4c15-8ae7-a7258d60be4e","Type":"ContainerDied","Data":"278812383a31cbe8c7aebda128041970bc1f9109f277592a1b1416fd27928d5e"} Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.398519 4644 scope.go:117] "RemoveContainer" containerID="b159ba4d6272b401e619a08420e527c0433964b03a53534e281942b598f77596" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.419724 4644 scope.go:117] "RemoveContainer" containerID="875705816c6647192063f6df04df9c18137e28d9672b1e4aac6a921abba82cdf" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.431222 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-76whk"] Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.433605 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-76whk"] Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.455941 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-569cc974b5-64rdj"] Dec 13 06:48:38 crc kubenswrapper[4644]: E1213 06:48:38.456119 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a642cd12-49f1-4784-86ce-05f0291c9049" containerName="extract-utilities" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456131 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="a642cd12-49f1-4784-86ce-05f0291c9049" containerName="extract-utilities" Dec 13 06:48:38 crc kubenswrapper[4644]: E1213 06:48:38.456142 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a642cd12-49f1-4784-86ce-05f0291c9049" containerName="registry-server" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456149 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="a642cd12-49f1-4784-86ce-05f0291c9049" containerName="registry-server" Dec 13 06:48:38 crc kubenswrapper[4644]: E1213 06:48:38.456157 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="946372f5-3102-4e5c-b2af-03057f3b92dd" containerName="extract-content" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456162 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="946372f5-3102-4e5c-b2af-03057f3b92dd" containerName="extract-content" Dec 13 06:48:38 crc kubenswrapper[4644]: E1213 06:48:38.456173 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="079619c8-cbc6-485e-a90d-72b905d379b0" containerName="controller-manager" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 
06:48:38.456178 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="079619c8-cbc6-485e-a90d-72b905d379b0" containerName="controller-manager" Dec 13 06:48:38 crc kubenswrapper[4644]: E1213 06:48:38.456183 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8aae84d1-53df-46e4-9275-cff9d6af6a81" containerName="route-controller-manager" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456189 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="8aae84d1-53df-46e4-9275-cff9d6af6a81" containerName="route-controller-manager" Dec 13 06:48:38 crc kubenswrapper[4644]: E1213 06:48:38.456196 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="946372f5-3102-4e5c-b2af-03057f3b92dd" containerName="extract-utilities" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456201 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="946372f5-3102-4e5c-b2af-03057f3b92dd" containerName="extract-utilities" Dec 13 06:48:38 crc kubenswrapper[4644]: E1213 06:48:38.456207 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8b426e8-8b8b-4168-afaa-a5cbb6752fdb" containerName="extract-content" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456213 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8b426e8-8b8b-4168-afaa-a5cbb6752fdb" containerName="extract-content" Dec 13 06:48:38 crc kubenswrapper[4644]: E1213 06:48:38.456220 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8b426e8-8b8b-4168-afaa-a5cbb6752fdb" containerName="extract-utilities" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456226 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8b426e8-8b8b-4168-afaa-a5cbb6752fdb" containerName="extract-utilities" Dec 13 06:48:38 crc kubenswrapper[4644]: E1213 06:48:38.456235 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="946372f5-3102-4e5c-b2af-03057f3b92dd" containerName="registry-server" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456240 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="946372f5-3102-4e5c-b2af-03057f3b92dd" containerName="registry-server" Dec 13 06:48:38 crc kubenswrapper[4644]: E1213 06:48:38.456248 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e3eddf5-6aec-4c15-8ae7-a7258d60be4e" containerName="registry-server" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456253 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e3eddf5-6aec-4c15-8ae7-a7258d60be4e" containerName="registry-server" Dec 13 06:48:38 crc kubenswrapper[4644]: E1213 06:48:38.456262 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8b426e8-8b8b-4168-afaa-a5cbb6752fdb" containerName="registry-server" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456267 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8b426e8-8b8b-4168-afaa-a5cbb6752fdb" containerName="registry-server" Dec 13 06:48:38 crc kubenswrapper[4644]: E1213 06:48:38.456275 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e3eddf5-6aec-4c15-8ae7-a7258d60be4e" containerName="extract-utilities" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456280 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e3eddf5-6aec-4c15-8ae7-a7258d60be4e" containerName="extract-utilities" Dec 13 06:48:38 crc kubenswrapper[4644]: E1213 06:48:38.456288 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a642cd12-49f1-4784-86ce-05f0291c9049" 
containerName="extract-content" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456293 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="a642cd12-49f1-4784-86ce-05f0291c9049" containerName="extract-content" Dec 13 06:48:38 crc kubenswrapper[4644]: E1213 06:48:38.456301 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e3eddf5-6aec-4c15-8ae7-a7258d60be4e" containerName="extract-content" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456306 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e3eddf5-6aec-4c15-8ae7-a7258d60be4e" containerName="extract-content" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456414 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8b426e8-8b8b-4168-afaa-a5cbb6752fdb" containerName="registry-server" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456426 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="a642cd12-49f1-4784-86ce-05f0291c9049" containerName="registry-server" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456434 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="8aae84d1-53df-46e4-9275-cff9d6af6a81" containerName="route-controller-manager" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456458 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="079619c8-cbc6-485e-a90d-72b905d379b0" containerName="controller-manager" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456465 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="946372f5-3102-4e5c-b2af-03057f3b92dd" containerName="registry-server" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456472 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e3eddf5-6aec-4c15-8ae7-a7258d60be4e" containerName="registry-server" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.456884 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.461302 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.461560 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.461680 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.462175 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.462286 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.462510 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.464962 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx"] Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.465613 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.467123 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.467501 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.467655 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.468061 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-569cc974b5-64rdj"] Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.468279 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.468397 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.468518 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.470137 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx"] Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.470281 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.474678 4644 scope.go:117] "RemoveContainer" containerID="d9aef0c6879c4d0039d2e53e6758263f48b6809bc90c2cb3c7d339fa11c98f6d" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.488972 4644 scope.go:117] "RemoveContainer" containerID="b159ba4d6272b401e619a08420e527c0433964b03a53534e281942b598f77596" Dec 13 06:48:38 crc kubenswrapper[4644]: E1213 06:48:38.493787 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b159ba4d6272b401e619a08420e527c0433964b03a53534e281942b598f77596\": container with ID starting with b159ba4d6272b401e619a08420e527c0433964b03a53534e281942b598f77596 not found: ID does not exist" containerID="b159ba4d6272b401e619a08420e527c0433964b03a53534e281942b598f77596" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.493828 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b159ba4d6272b401e619a08420e527c0433964b03a53534e281942b598f77596"} err="failed to get container status \"b159ba4d6272b401e619a08420e527c0433964b03a53534e281942b598f77596\": rpc error: code = NotFound desc = could not find container \"b159ba4d6272b401e619a08420e527c0433964b03a53534e281942b598f77596\": container with ID starting with b159ba4d6272b401e619a08420e527c0433964b03a53534e281942b598f77596 not found: ID does not exist" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.493859 4644 scope.go:117] "RemoveContainer" containerID="875705816c6647192063f6df04df9c18137e28d9672b1e4aac6a921abba82cdf" Dec 13 06:48:38 crc kubenswrapper[4644]: E1213 06:48:38.494138 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"875705816c6647192063f6df04df9c18137e28d9672b1e4aac6a921abba82cdf\": container with ID starting with 875705816c6647192063f6df04df9c18137e28d9672b1e4aac6a921abba82cdf not found: ID does not exist" containerID="875705816c6647192063f6df04df9c18137e28d9672b1e4aac6a921abba82cdf" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.494161 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"875705816c6647192063f6df04df9c18137e28d9672b1e4aac6a921abba82cdf"} err="failed to get container status \"875705816c6647192063f6df04df9c18137e28d9672b1e4aac6a921abba82cdf\": rpc error: code = NotFound desc = could not find container \"875705816c6647192063f6df04df9c18137e28d9672b1e4aac6a921abba82cdf\": container with ID starting with 875705816c6647192063f6df04df9c18137e28d9672b1e4aac6a921abba82cdf not found: ID does not exist" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.494180 4644 scope.go:117] "RemoveContainer" containerID="d9aef0c6879c4d0039d2e53e6758263f48b6809bc90c2cb3c7d339fa11c98f6d" Dec 13 06:48:38 crc kubenswrapper[4644]: E1213 06:48:38.495294 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9aef0c6879c4d0039d2e53e6758263f48b6809bc90c2cb3c7d339fa11c98f6d\": container with ID starting with d9aef0c6879c4d0039d2e53e6758263f48b6809bc90c2cb3c7d339fa11c98f6d not found: ID does not exist" containerID="d9aef0c6879c4d0039d2e53e6758263f48b6809bc90c2cb3c7d339fa11c98f6d" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.495321 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9aef0c6879c4d0039d2e53e6758263f48b6809bc90c2cb3c7d339fa11c98f6d"} err="failed to get container status \"d9aef0c6879c4d0039d2e53e6758263f48b6809bc90c2cb3c7d339fa11c98f6d\": rpc error: code = NotFound desc = could not find container \"d9aef0c6879c4d0039d2e53e6758263f48b6809bc90c2cb3c7d339fa11c98f6d\": container with ID starting with d9aef0c6879c4d0039d2e53e6758263f48b6809bc90c2cb3c7d339fa11c98f6d not found: ID does not exist" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.515303 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klxdq\" (UniqueName: \"kubernetes.io/projected/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-kube-api-access-klxdq\") pod \"route-controller-manager-846d56594-pvrgx\" (UID: \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\") " pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.515351 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b935d3db-f30a-4476-87ae-47e997c8716c-serving-cert\") pod \"controller-manager-569cc974b5-64rdj\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.515397 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-proxy-ca-bundles\") pod \"controller-manager-569cc974b5-64rdj\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:38 crc kubenswrapper[4644]: 
I1213 06:48:38.515498 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrw9v\" (UniqueName: \"kubernetes.io/projected/b935d3db-f30a-4476-87ae-47e997c8716c-kube-api-access-rrw9v\") pod \"controller-manager-569cc974b5-64rdj\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.515528 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-config\") pod \"route-controller-manager-846d56594-pvrgx\" (UID: \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\") " pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.515547 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-serving-cert\") pod \"route-controller-manager-846d56594-pvrgx\" (UID: \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\") " pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.515574 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-client-ca\") pod \"controller-manager-569cc974b5-64rdj\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.515593 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-config\") pod \"controller-manager-569cc974b5-64rdj\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.515656 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-client-ca\") pod \"route-controller-manager-846d56594-pvrgx\" (UID: \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\") " pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.617035 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b935d3db-f30a-4476-87ae-47e997c8716c-serving-cert\") pod \"controller-manager-569cc974b5-64rdj\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.617088 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-proxy-ca-bundles\") pod \"controller-manager-569cc974b5-64rdj\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.617136 4644 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-rrw9v\" (UniqueName: \"kubernetes.io/projected/b935d3db-f30a-4476-87ae-47e997c8716c-kube-api-access-rrw9v\") pod \"controller-manager-569cc974b5-64rdj\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.617157 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-config\") pod \"route-controller-manager-846d56594-pvrgx\" (UID: \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\") " pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.617174 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-serving-cert\") pod \"route-controller-manager-846d56594-pvrgx\" (UID: \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\") " pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.617194 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-client-ca\") pod \"controller-manager-569cc974b5-64rdj\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.617208 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-config\") pod \"controller-manager-569cc974b5-64rdj\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.617246 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-client-ca\") pod \"route-controller-manager-846d56594-pvrgx\" (UID: \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\") " pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.617273 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klxdq\" (UniqueName: \"kubernetes.io/projected/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-kube-api-access-klxdq\") pod \"route-controller-manager-846d56594-pvrgx\" (UID: \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\") " pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.618235 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-client-ca\") pod \"route-controller-manager-846d56594-pvrgx\" (UID: \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\") " pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.618521 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-config\") pod 
\"route-controller-manager-846d56594-pvrgx\" (UID: \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\") " pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.618816 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-client-ca\") pod \"controller-manager-569cc974b5-64rdj\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.619260 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-config\") pod \"controller-manager-569cc974b5-64rdj\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.620113 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-proxy-ca-bundles\") pod \"controller-manager-569cc974b5-64rdj\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.621669 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b935d3db-f30a-4476-87ae-47e997c8716c-serving-cert\") pod \"controller-manager-569cc974b5-64rdj\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.621703 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-serving-cert\") pod \"route-controller-manager-846d56594-pvrgx\" (UID: \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\") " pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.630326 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrw9v\" (UniqueName: \"kubernetes.io/projected/b935d3db-f30a-4476-87ae-47e997c8716c-kube-api-access-rrw9v\") pod \"controller-manager-569cc974b5-64rdj\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.630585 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klxdq\" (UniqueName: \"kubernetes.io/projected/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-kube-api-access-klxdq\") pod \"route-controller-manager-846d56594-pvrgx\" (UID: \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\") " pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.784561 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.806123 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:38 crc kubenswrapper[4644]: I1213 06:48:38.972178 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx"] Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.133031 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-569cc974b5-64rdj"] Dec 13 06:48:39 crc kubenswrapper[4644]: W1213 06:48:39.139516 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb935d3db_f30a_4476_87ae_47e997c8716c.slice/crio-d117b747ab903e4039a6add61f4f9928de749f1158815422026b5c1325573073 WatchSource:0}: Error finding container d117b747ab903e4039a6add61f4f9928de749f1158815422026b5c1325573073: Status 404 returned error can't find the container with id d117b747ab903e4039a6add61f4f9928de749f1158815422026b5c1325573073 Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.392574 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.393148 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.395287 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.395426 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.402504 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.409175 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" event={"ID":"b935d3db-f30a-4476-87ae-47e997c8716c","Type":"ContainerStarted","Data":"6d60980f44b670850b93080185dd10b3b16e8911358f37a355975814b8584ee0"} Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.409207 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" event={"ID":"b935d3db-f30a-4476-87ae-47e997c8716c","Type":"ContainerStarted","Data":"d117b747ab903e4039a6add61f4f9928de749f1158815422026b5c1325573073"} Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.410012 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.414770 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" event={"ID":"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345","Type":"ContainerStarted","Data":"e8fce88780e1ee0ec139751e5c10d3213ef696f5a32c039d441953d79656b90c"} Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.414816 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" event={"ID":"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345","Type":"ContainerStarted","Data":"e6d8982098279e5c3c11ec28e99c0b7b2ff45c4cd0b781ce9e84e1df8c8ec812"} Dec 13 06:48:39 crc 
kubenswrapper[4644]: I1213 06:48:39.415561 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.416101 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.426544 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30815e6b-8ea4-411c-9f27-7505e2ec2d98-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"30815e6b-8ea4-411c-9f27-7505e2ec2d98\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.426676 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/30815e6b-8ea4-411c-9f27-7505e2ec2d98-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"30815e6b-8ea4-411c-9f27-7505e2ec2d98\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 06:48:39 crc kubenswrapper[4644]: E1213 06:48:39.429689 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1e3eddf5_6aec_4c15_8ae7_a7258d60be4e.slice/crio-278812383a31cbe8c7aebda128041970bc1f9109f277592a1b1416fd27928d5e\": RecentStats: unable to find data in memory cache]" Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.432200 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" podStartSLOduration=3.432181569 podStartE2EDuration="3.432181569s" podCreationTimestamp="2025-12-13 06:48:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:48:39.429619731 +0000 UTC m=+181.644570565" watchObservedRunningTime="2025-12-13 06:48:39.432181569 +0000 UTC m=+181.647132402" Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.454918 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" podStartSLOduration=3.45489708 podStartE2EDuration="3.45489708s" podCreationTimestamp="2025-12-13 06:48:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:48:39.453651266 +0000 UTC m=+181.668602099" watchObservedRunningTime="2025-12-13 06:48:39.45489708 +0000 UTC m=+181.669847913" Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.527750 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30815e6b-8ea4-411c-9f27-7505e2ec2d98-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"30815e6b-8ea4-411c-9f27-7505e2ec2d98\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.528740 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/30815e6b-8ea4-411c-9f27-7505e2ec2d98-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"30815e6b-8ea4-411c-9f27-7505e2ec2d98\") " 
pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.528886 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/30815e6b-8ea4-411c-9f27-7505e2ec2d98-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"30815e6b-8ea4-411c-9f27-7505e2ec2d98\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.546191 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30815e6b-8ea4-411c-9f27-7505e2ec2d98-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"30815e6b-8ea4-411c-9f27-7505e2ec2d98\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.594736 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.706960 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.754237 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 06:48:39 crc kubenswrapper[4644]: I1213 06:48:39.754289 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 06:48:40 crc kubenswrapper[4644]: I1213 06:48:40.057253 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 13 06:48:40 crc kubenswrapper[4644]: W1213 06:48:40.064802 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod30815e6b_8ea4_411c_9f27_7505e2ec2d98.slice/crio-d737b909745b13c419606b428e1ce159cf12e2c7e737e50b39cef4a18ab247e7 WatchSource:0}: Error finding container d737b909745b13c419606b428e1ce159cf12e2c7e737e50b39cef4a18ab247e7: Status 404 returned error can't find the container with id d737b909745b13c419606b428e1ce159cf12e2c7e737e50b39cef4a18ab247e7 Dec 13 06:48:40 crc kubenswrapper[4644]: I1213 06:48:40.395971 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e3eddf5-6aec-4c15-8ae7-a7258d60be4e" path="/var/lib/kubelet/pods/1e3eddf5-6aec-4c15-8ae7-a7258d60be4e/volumes" Dec 13 06:48:40 crc kubenswrapper[4644]: I1213 06:48:40.431079 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"30815e6b-8ea4-411c-9f27-7505e2ec2d98","Type":"ContainerStarted","Data":"2fa1b3307cee9457ae728dec4e208838ca3043f55b96e5927442180a8fa909f8"} Dec 13 06:48:40 crc kubenswrapper[4644]: I1213 06:48:40.431123 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"30815e6b-8ea4-411c-9f27-7505e2ec2d98","Type":"ContainerStarted","Data":"d737b909745b13c419606b428e1ce159cf12e2c7e737e50b39cef4a18ab247e7"} Dec 13 
06:48:40 crc kubenswrapper[4644]: I1213 06:48:40.442725 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=1.442711383 podStartE2EDuration="1.442711383s" podCreationTimestamp="2025-12-13 06:48:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:48:40.441307493 +0000 UTC m=+182.656258315" watchObservedRunningTime="2025-12-13 06:48:40.442711383 +0000 UTC m=+182.657662217" Dec 13 06:48:41 crc kubenswrapper[4644]: I1213 06:48:41.437605 4644 generic.go:334] "Generic (PLEG): container finished" podID="30815e6b-8ea4-411c-9f27-7505e2ec2d98" containerID="2fa1b3307cee9457ae728dec4e208838ca3043f55b96e5927442180a8fa909f8" exitCode=0 Dec 13 06:48:41 crc kubenswrapper[4644]: I1213 06:48:41.437752 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"30815e6b-8ea4-411c-9f27-7505e2ec2d98","Type":"ContainerDied","Data":"2fa1b3307cee9457ae728dec4e208838ca3043f55b96e5927442180a8fa909f8"} Dec 13 06:48:41 crc kubenswrapper[4644]: I1213 06:48:41.930939 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-4z6jl"] Dec 13 06:48:42 crc kubenswrapper[4644]: I1213 06:48:42.649223 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 06:48:42 crc kubenswrapper[4644]: I1213 06:48:42.767802 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30815e6b-8ea4-411c-9f27-7505e2ec2d98-kube-api-access\") pod \"30815e6b-8ea4-411c-9f27-7505e2ec2d98\" (UID: \"30815e6b-8ea4-411c-9f27-7505e2ec2d98\") " Dec 13 06:48:42 crc kubenswrapper[4644]: I1213 06:48:42.767900 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/30815e6b-8ea4-411c-9f27-7505e2ec2d98-kubelet-dir\") pod \"30815e6b-8ea4-411c-9f27-7505e2ec2d98\" (UID: \"30815e6b-8ea4-411c-9f27-7505e2ec2d98\") " Dec 13 06:48:42 crc kubenswrapper[4644]: I1213 06:48:42.767965 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/30815e6b-8ea4-411c-9f27-7505e2ec2d98-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "30815e6b-8ea4-411c-9f27-7505e2ec2d98" (UID: "30815e6b-8ea4-411c-9f27-7505e2ec2d98"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:48:42 crc kubenswrapper[4644]: I1213 06:48:42.768303 4644 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/30815e6b-8ea4-411c-9f27-7505e2ec2d98-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:42 crc kubenswrapper[4644]: I1213 06:48:42.778573 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30815e6b-8ea4-411c-9f27-7505e2ec2d98-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "30815e6b-8ea4-411c-9f27-7505e2ec2d98" (UID: "30815e6b-8ea4-411c-9f27-7505e2ec2d98"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:48:42 crc kubenswrapper[4644]: I1213 06:48:42.869719 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30815e6b-8ea4-411c-9f27-7505e2ec2d98-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:43 crc kubenswrapper[4644]: I1213 06:48:43.448154 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"30815e6b-8ea4-411c-9f27-7505e2ec2d98","Type":"ContainerDied","Data":"d737b909745b13c419606b428e1ce159cf12e2c7e737e50b39cef4a18ab247e7"} Dec 13 06:48:43 crc kubenswrapper[4644]: I1213 06:48:43.448192 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d737b909745b13c419606b428e1ce159cf12e2c7e737e50b39cef4a18ab247e7" Dec 13 06:48:43 crc kubenswrapper[4644]: I1213 06:48:43.448232 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 06:48:43 crc kubenswrapper[4644]: I1213 06:48:43.989882 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 13 06:48:43 crc kubenswrapper[4644]: E1213 06:48:43.990288 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30815e6b-8ea4-411c-9f27-7505e2ec2d98" containerName="pruner" Dec 13 06:48:43 crc kubenswrapper[4644]: I1213 06:48:43.990302 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="30815e6b-8ea4-411c-9f27-7505e2ec2d98" containerName="pruner" Dec 13 06:48:43 crc kubenswrapper[4644]: I1213 06:48:43.990402 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="30815e6b-8ea4-411c-9f27-7505e2ec2d98" containerName="pruner" Dec 13 06:48:43 crc kubenswrapper[4644]: I1213 06:48:43.990738 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 13 06:48:43 crc kubenswrapper[4644]: I1213 06:48:43.993091 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 13 06:48:43 crc kubenswrapper[4644]: I1213 06:48:43.996712 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 13 06:48:44 crc kubenswrapper[4644]: I1213 06:48:44.002229 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 13 06:48:44 crc kubenswrapper[4644]: I1213 06:48:44.084424 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/9beaac24-f4b1-461d-94d0-d798a89ce9a0-var-lock\") pod \"installer-9-crc\" (UID: \"9beaac24-f4b1-461d-94d0-d798a89ce9a0\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 06:48:44 crc kubenswrapper[4644]: I1213 06:48:44.084488 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9beaac24-f4b1-461d-94d0-d798a89ce9a0-kubelet-dir\") pod \"installer-9-crc\" (UID: \"9beaac24-f4b1-461d-94d0-d798a89ce9a0\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 06:48:44 crc kubenswrapper[4644]: I1213 06:48:44.084877 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9beaac24-f4b1-461d-94d0-d798a89ce9a0-kube-api-access\") pod \"installer-9-crc\" (UID: \"9beaac24-f4b1-461d-94d0-d798a89ce9a0\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 06:48:44 crc kubenswrapper[4644]: I1213 06:48:44.185988 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9beaac24-f4b1-461d-94d0-d798a89ce9a0-kube-api-access\") pod \"installer-9-crc\" (UID: \"9beaac24-f4b1-461d-94d0-d798a89ce9a0\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 06:48:44 crc kubenswrapper[4644]: I1213 06:48:44.186046 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/9beaac24-f4b1-461d-94d0-d798a89ce9a0-var-lock\") pod \"installer-9-crc\" (UID: \"9beaac24-f4b1-461d-94d0-d798a89ce9a0\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 06:48:44 crc kubenswrapper[4644]: I1213 06:48:44.186070 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9beaac24-f4b1-461d-94d0-d798a89ce9a0-kubelet-dir\") pod \"installer-9-crc\" (UID: \"9beaac24-f4b1-461d-94d0-d798a89ce9a0\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 06:48:44 crc kubenswrapper[4644]: I1213 06:48:44.186146 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/9beaac24-f4b1-461d-94d0-d798a89ce9a0-var-lock\") pod \"installer-9-crc\" (UID: \"9beaac24-f4b1-461d-94d0-d798a89ce9a0\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 06:48:44 crc kubenswrapper[4644]: I1213 06:48:44.186159 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9beaac24-f4b1-461d-94d0-d798a89ce9a0-kubelet-dir\") pod \"installer-9-crc\" (UID: 
\"9beaac24-f4b1-461d-94d0-d798a89ce9a0\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 06:48:44 crc kubenswrapper[4644]: I1213 06:48:44.206119 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9beaac24-f4b1-461d-94d0-d798a89ce9a0-kube-api-access\") pod \"installer-9-crc\" (UID: \"9beaac24-f4b1-461d-94d0-d798a89ce9a0\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 06:48:44 crc kubenswrapper[4644]: I1213 06:48:44.318366 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 13 06:48:44 crc kubenswrapper[4644]: I1213 06:48:44.699108 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 13 06:48:44 crc kubenswrapper[4644]: W1213 06:48:44.704535 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod9beaac24_f4b1_461d_94d0_d798a89ce9a0.slice/crio-b36d4c3b31f316536b9fad737bdeee73e595f5562e0b53ce85c7c44c9963fb20 WatchSource:0}: Error finding container b36d4c3b31f316536b9fad737bdeee73e595f5562e0b53ce85c7c44c9963fb20: Status 404 returned error can't find the container with id b36d4c3b31f316536b9fad737bdeee73e595f5562e0b53ce85c7c44c9963fb20 Dec 13 06:48:45 crc kubenswrapper[4644]: I1213 06:48:45.458979 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"9beaac24-f4b1-461d-94d0-d798a89ce9a0","Type":"ContainerStarted","Data":"015945f1ae0449376ffcb272bc0933713ea96691f0a2d84f9dff66a8f012d957"} Dec 13 06:48:45 crc kubenswrapper[4644]: I1213 06:48:45.459192 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"9beaac24-f4b1-461d-94d0-d798a89ce9a0","Type":"ContainerStarted","Data":"b36d4c3b31f316536b9fad737bdeee73e595f5562e0b53ce85c7c44c9963fb20"} Dec 13 06:48:45 crc kubenswrapper[4644]: I1213 06:48:45.472306 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.47228864 podStartE2EDuration="2.47228864s" podCreationTimestamp="2025-12-13 06:48:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:48:45.470071309 +0000 UTC m=+187.685022142" watchObservedRunningTime="2025-12-13 06:48:45.47228864 +0000 UTC m=+187.687239473" Dec 13 06:48:49 crc kubenswrapper[4644]: E1213 06:48:49.530771 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1e3eddf5_6aec_4c15_8ae7_a7258d60be4e.slice/crio-278812383a31cbe8c7aebda128041970bc1f9109f277592a1b1416fd27928d5e\": RecentStats: unable to find data in memory cache]" Dec 13 06:48:56 crc kubenswrapper[4644]: I1213 06:48:56.617589 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-569cc974b5-64rdj"] Dec 13 06:48:56 crc kubenswrapper[4644]: I1213 06:48:56.618138 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" podUID="b935d3db-f30a-4476-87ae-47e997c8716c" containerName="controller-manager" containerID="cri-o://6d60980f44b670850b93080185dd10b3b16e8911358f37a355975814b8584ee0" gracePeriod=30 Dec 13 06:48:56 crc kubenswrapper[4644]: 
I1213 06:48:56.626373 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx"] Dec 13 06:48:56 crc kubenswrapper[4644]: I1213 06:48:56.626588 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" podUID="f84a1d5b-3a42-4ca4-83ee-1a02ce28e345" containerName="route-controller-manager" containerID="cri-o://e8fce88780e1ee0ec139751e5c10d3213ef696f5a32c039d441953d79656b90c" gracePeriod=30 Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.089402 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.159534 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-client-ca\") pod \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\" (UID: \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\") " Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.159644 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-klxdq\" (UniqueName: \"kubernetes.io/projected/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-kube-api-access-klxdq\") pod \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\" (UID: \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\") " Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.159675 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-serving-cert\") pod \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\" (UID: \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\") " Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.159735 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-config\") pod \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\" (UID: \"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345\") " Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.160646 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-config" (OuterVolumeSpecName: "config") pod "f84a1d5b-3a42-4ca4-83ee-1a02ce28e345" (UID: "f84a1d5b-3a42-4ca4-83ee-1a02ce28e345"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.160774 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-client-ca" (OuterVolumeSpecName: "client-ca") pod "f84a1d5b-3a42-4ca4-83ee-1a02ce28e345" (UID: "f84a1d5b-3a42-4ca4-83ee-1a02ce28e345"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.164799 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-kube-api-access-klxdq" (OuterVolumeSpecName: "kube-api-access-klxdq") pod "f84a1d5b-3a42-4ca4-83ee-1a02ce28e345" (UID: "f84a1d5b-3a42-4ca4-83ee-1a02ce28e345"). InnerVolumeSpecName "kube-api-access-klxdq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.164851 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f84a1d5b-3a42-4ca4-83ee-1a02ce28e345" (UID: "f84a1d5b-3a42-4ca4-83ee-1a02ce28e345"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.182007 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.260923 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-config\") pod \"b935d3db-f30a-4476-87ae-47e997c8716c\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.260993 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-proxy-ca-bundles\") pod \"b935d3db-f30a-4476-87ae-47e997c8716c\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.261072 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-client-ca\") pod \"b935d3db-f30a-4476-87ae-47e997c8716c\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.261096 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrw9v\" (UniqueName: \"kubernetes.io/projected/b935d3db-f30a-4476-87ae-47e997c8716c-kube-api-access-rrw9v\") pod \"b935d3db-f30a-4476-87ae-47e997c8716c\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.261138 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b935d3db-f30a-4476-87ae-47e997c8716c-serving-cert\") pod \"b935d3db-f30a-4476-87ae-47e997c8716c\" (UID: \"b935d3db-f30a-4476-87ae-47e997c8716c\") " Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.261396 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-klxdq\" (UniqueName: \"kubernetes.io/projected/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-kube-api-access-klxdq\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.261414 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.261425 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.261433 4644 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:57 crc kubenswrapper[4644]: 
I1213 06:48:57.261788 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-client-ca" (OuterVolumeSpecName: "client-ca") pod "b935d3db-f30a-4476-87ae-47e997c8716c" (UID: "b935d3db-f30a-4476-87ae-47e997c8716c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.261828 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-config" (OuterVolumeSpecName: "config") pod "b935d3db-f30a-4476-87ae-47e997c8716c" (UID: "b935d3db-f30a-4476-87ae-47e997c8716c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.261921 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "b935d3db-f30a-4476-87ae-47e997c8716c" (UID: "b935d3db-f30a-4476-87ae-47e997c8716c"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.264292 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b935d3db-f30a-4476-87ae-47e997c8716c-kube-api-access-rrw9v" (OuterVolumeSpecName: "kube-api-access-rrw9v") pod "b935d3db-f30a-4476-87ae-47e997c8716c" (UID: "b935d3db-f30a-4476-87ae-47e997c8716c"). InnerVolumeSpecName "kube-api-access-rrw9v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.264407 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b935d3db-f30a-4476-87ae-47e997c8716c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b935d3db-f30a-4476-87ae-47e997c8716c" (UID: "b935d3db-f30a-4476-87ae-47e997c8716c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.362189 4644 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b935d3db-f30a-4476-87ae-47e997c8716c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.362240 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.362250 4644 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.362262 4644 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b935d3db-f30a-4476-87ae-47e997c8716c-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.362270 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrw9v\" (UniqueName: \"kubernetes.io/projected/b935d3db-f30a-4476-87ae-47e997c8716c-kube-api-access-rrw9v\") on node \"crc\" DevicePath \"\"" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.519075 4644 generic.go:334] "Generic (PLEG): container finished" podID="b935d3db-f30a-4476-87ae-47e997c8716c" containerID="6d60980f44b670850b93080185dd10b3b16e8911358f37a355975814b8584ee0" exitCode=0 Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.519134 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.519130 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" event={"ID":"b935d3db-f30a-4476-87ae-47e997c8716c","Type":"ContainerDied","Data":"6d60980f44b670850b93080185dd10b3b16e8911358f37a355975814b8584ee0"} Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.519498 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-569cc974b5-64rdj" event={"ID":"b935d3db-f30a-4476-87ae-47e997c8716c","Type":"ContainerDied","Data":"d117b747ab903e4039a6add61f4f9928de749f1158815422026b5c1325573073"} Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.519518 4644 scope.go:117] "RemoveContainer" containerID="6d60980f44b670850b93080185dd10b3b16e8911358f37a355975814b8584ee0" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.521751 4644 generic.go:334] "Generic (PLEG): container finished" podID="f84a1d5b-3a42-4ca4-83ee-1a02ce28e345" containerID="e8fce88780e1ee0ec139751e5c10d3213ef696f5a32c039d441953d79656b90c" exitCode=0 Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.521786 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" event={"ID":"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345","Type":"ContainerDied","Data":"e8fce88780e1ee0ec139751e5c10d3213ef696f5a32c039d441953d79656b90c"} Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.521805 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" 
event={"ID":"f84a1d5b-3a42-4ca4-83ee-1a02ce28e345","Type":"ContainerDied","Data":"e6d8982098279e5c3c11ec28e99c0b7b2ff45c4cd0b781ce9e84e1df8c8ec812"} Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.521845 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.536874 4644 scope.go:117] "RemoveContainer" containerID="6d60980f44b670850b93080185dd10b3b16e8911358f37a355975814b8584ee0" Dec 13 06:48:57 crc kubenswrapper[4644]: E1213 06:48:57.537179 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d60980f44b670850b93080185dd10b3b16e8911358f37a355975814b8584ee0\": container with ID starting with 6d60980f44b670850b93080185dd10b3b16e8911358f37a355975814b8584ee0 not found: ID does not exist" containerID="6d60980f44b670850b93080185dd10b3b16e8911358f37a355975814b8584ee0" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.537217 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d60980f44b670850b93080185dd10b3b16e8911358f37a355975814b8584ee0"} err="failed to get container status \"6d60980f44b670850b93080185dd10b3b16e8911358f37a355975814b8584ee0\": rpc error: code = NotFound desc = could not find container \"6d60980f44b670850b93080185dd10b3b16e8911358f37a355975814b8584ee0\": container with ID starting with 6d60980f44b670850b93080185dd10b3b16e8911358f37a355975814b8584ee0 not found: ID does not exist" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.537238 4644 scope.go:117] "RemoveContainer" containerID="e8fce88780e1ee0ec139751e5c10d3213ef696f5a32c039d441953d79656b90c" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.542275 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-569cc974b5-64rdj"] Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.544382 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-569cc974b5-64rdj"] Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.549320 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx"] Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.552101 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-846d56594-pvrgx"] Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.553023 4644 scope.go:117] "RemoveContainer" containerID="e8fce88780e1ee0ec139751e5c10d3213ef696f5a32c039d441953d79656b90c" Dec 13 06:48:57 crc kubenswrapper[4644]: E1213 06:48:57.553309 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8fce88780e1ee0ec139751e5c10d3213ef696f5a32c039d441953d79656b90c\": container with ID starting with e8fce88780e1ee0ec139751e5c10d3213ef696f5a32c039d441953d79656b90c not found: ID does not exist" containerID="e8fce88780e1ee0ec139751e5c10d3213ef696f5a32c039d441953d79656b90c" Dec 13 06:48:57 crc kubenswrapper[4644]: I1213 06:48:57.553340 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8fce88780e1ee0ec139751e5c10d3213ef696f5a32c039d441953d79656b90c"} err="failed to get container status \"e8fce88780e1ee0ec139751e5c10d3213ef696f5a32c039d441953d79656b90c\": 
rpc error: code = NotFound desc = could not find container \"e8fce88780e1ee0ec139751e5c10d3213ef696f5a32c039d441953d79656b90c\": container with ID starting with e8fce88780e1ee0ec139751e5c10d3213ef696f5a32c039d441953d79656b90c not found: ID does not exist" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.393923 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b935d3db-f30a-4476-87ae-47e997c8716c" path="/var/lib/kubelet/pods/b935d3db-f30a-4476-87ae-47e997c8716c/volumes" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.394530 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f84a1d5b-3a42-4ca4-83ee-1a02ce28e345" path="/var/lib/kubelet/pods/f84a1d5b-3a42-4ca4-83ee-1a02ce28e345/volumes" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.473428 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-59cdcd9748-cftwk"] Dec 13 06:48:58 crc kubenswrapper[4644]: E1213 06:48:58.473660 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f84a1d5b-3a42-4ca4-83ee-1a02ce28e345" containerName="route-controller-manager" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.473677 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f84a1d5b-3a42-4ca4-83ee-1a02ce28e345" containerName="route-controller-manager" Dec 13 06:48:58 crc kubenswrapper[4644]: E1213 06:48:58.473687 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b935d3db-f30a-4476-87ae-47e997c8716c" containerName="controller-manager" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.473693 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="b935d3db-f30a-4476-87ae-47e997c8716c" containerName="controller-manager" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.473794 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="b935d3db-f30a-4476-87ae-47e997c8716c" containerName="controller-manager" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.473804 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f84a1d5b-3a42-4ca4-83ee-1a02ce28e345" containerName="route-controller-manager" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.474152 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.475634 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz"] Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.476015 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.476042 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.476110 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.476164 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.476238 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.476526 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.477195 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.478705 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.479351 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.479390 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.479615 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.479833 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.480585 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.483077 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.487301 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-59cdcd9748-cftwk"] Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.489711 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz"] Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.576113 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41ed6e90-eff5-4e21-84c9-0503f0966c2f-config\") pod \"controller-manager-59cdcd9748-cftwk\" (UID: \"41ed6e90-eff5-4e21-84c9-0503f0966c2f\") " pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.576157 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc5rt\" (UniqueName: \"kubernetes.io/projected/b0321825-9074-4daf-8abc-dbfd95b9a85e-kube-api-access-vc5rt\") pod \"route-controller-manager-5c5bfc6bc9-g74pz\" (UID: \"b0321825-9074-4daf-8abc-dbfd95b9a85e\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.576188 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0321825-9074-4daf-8abc-dbfd95b9a85e-config\") pod \"route-controller-manager-5c5bfc6bc9-g74pz\" (UID: \"b0321825-9074-4daf-8abc-dbfd95b9a85e\") " 
pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.576213 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b0321825-9074-4daf-8abc-dbfd95b9a85e-client-ca\") pod \"route-controller-manager-5c5bfc6bc9-g74pz\" (UID: \"b0321825-9074-4daf-8abc-dbfd95b9a85e\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.576395 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z62nt\" (UniqueName: \"kubernetes.io/projected/41ed6e90-eff5-4e21-84c9-0503f0966c2f-kube-api-access-z62nt\") pod \"controller-manager-59cdcd9748-cftwk\" (UID: \"41ed6e90-eff5-4e21-84c9-0503f0966c2f\") " pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.576471 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/41ed6e90-eff5-4e21-84c9-0503f0966c2f-client-ca\") pod \"controller-manager-59cdcd9748-cftwk\" (UID: \"41ed6e90-eff5-4e21-84c9-0503f0966c2f\") " pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.576495 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0321825-9074-4daf-8abc-dbfd95b9a85e-serving-cert\") pod \"route-controller-manager-5c5bfc6bc9-g74pz\" (UID: \"b0321825-9074-4daf-8abc-dbfd95b9a85e\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.576554 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/41ed6e90-eff5-4e21-84c9-0503f0966c2f-proxy-ca-bundles\") pod \"controller-manager-59cdcd9748-cftwk\" (UID: \"41ed6e90-eff5-4e21-84c9-0503f0966c2f\") " pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.576593 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41ed6e90-eff5-4e21-84c9-0503f0966c2f-serving-cert\") pod \"controller-manager-59cdcd9748-cftwk\" (UID: \"41ed6e90-eff5-4e21-84c9-0503f0966c2f\") " pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.677283 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc5rt\" (UniqueName: \"kubernetes.io/projected/b0321825-9074-4daf-8abc-dbfd95b9a85e-kube-api-access-vc5rt\") pod \"route-controller-manager-5c5bfc6bc9-g74pz\" (UID: \"b0321825-9074-4daf-8abc-dbfd95b9a85e\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.677621 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0321825-9074-4daf-8abc-dbfd95b9a85e-config\") pod \"route-controller-manager-5c5bfc6bc9-g74pz\" (UID: \"b0321825-9074-4daf-8abc-dbfd95b9a85e\") " 
pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.677747 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b0321825-9074-4daf-8abc-dbfd95b9a85e-client-ca\") pod \"route-controller-manager-5c5bfc6bc9-g74pz\" (UID: \"b0321825-9074-4daf-8abc-dbfd95b9a85e\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.677861 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z62nt\" (UniqueName: \"kubernetes.io/projected/41ed6e90-eff5-4e21-84c9-0503f0966c2f-kube-api-access-z62nt\") pod \"controller-manager-59cdcd9748-cftwk\" (UID: \"41ed6e90-eff5-4e21-84c9-0503f0966c2f\") " pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.677944 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/41ed6e90-eff5-4e21-84c9-0503f0966c2f-client-ca\") pod \"controller-manager-59cdcd9748-cftwk\" (UID: \"41ed6e90-eff5-4e21-84c9-0503f0966c2f\") " pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.678022 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0321825-9074-4daf-8abc-dbfd95b9a85e-serving-cert\") pod \"route-controller-manager-5c5bfc6bc9-g74pz\" (UID: \"b0321825-9074-4daf-8abc-dbfd95b9a85e\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.678107 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/41ed6e90-eff5-4e21-84c9-0503f0966c2f-proxy-ca-bundles\") pod \"controller-manager-59cdcd9748-cftwk\" (UID: \"41ed6e90-eff5-4e21-84c9-0503f0966c2f\") " pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.678185 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41ed6e90-eff5-4e21-84c9-0503f0966c2f-serving-cert\") pod \"controller-manager-59cdcd9748-cftwk\" (UID: \"41ed6e90-eff5-4e21-84c9-0503f0966c2f\") " pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.678261 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41ed6e90-eff5-4e21-84c9-0503f0966c2f-config\") pod \"controller-manager-59cdcd9748-cftwk\" (UID: \"41ed6e90-eff5-4e21-84c9-0503f0966c2f\") " pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.678493 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b0321825-9074-4daf-8abc-dbfd95b9a85e-client-ca\") pod \"route-controller-manager-5c5bfc6bc9-g74pz\" (UID: \"b0321825-9074-4daf-8abc-dbfd95b9a85e\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.679022 4644 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0321825-9074-4daf-8abc-dbfd95b9a85e-config\") pod \"route-controller-manager-5c5bfc6bc9-g74pz\" (UID: \"b0321825-9074-4daf-8abc-dbfd95b9a85e\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.679232 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/41ed6e90-eff5-4e21-84c9-0503f0966c2f-proxy-ca-bundles\") pod \"controller-manager-59cdcd9748-cftwk\" (UID: \"41ed6e90-eff5-4e21-84c9-0503f0966c2f\") " pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.679407 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/41ed6e90-eff5-4e21-84c9-0503f0966c2f-client-ca\") pod \"controller-manager-59cdcd9748-cftwk\" (UID: \"41ed6e90-eff5-4e21-84c9-0503f0966c2f\") " pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.679682 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41ed6e90-eff5-4e21-84c9-0503f0966c2f-config\") pod \"controller-manager-59cdcd9748-cftwk\" (UID: \"41ed6e90-eff5-4e21-84c9-0503f0966c2f\") " pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.681766 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41ed6e90-eff5-4e21-84c9-0503f0966c2f-serving-cert\") pod \"controller-manager-59cdcd9748-cftwk\" (UID: \"41ed6e90-eff5-4e21-84c9-0503f0966c2f\") " pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.683125 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0321825-9074-4daf-8abc-dbfd95b9a85e-serving-cert\") pod \"route-controller-manager-5c5bfc6bc9-g74pz\" (UID: \"b0321825-9074-4daf-8abc-dbfd95b9a85e\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.690484 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc5rt\" (UniqueName: \"kubernetes.io/projected/b0321825-9074-4daf-8abc-dbfd95b9a85e-kube-api-access-vc5rt\") pod \"route-controller-manager-5c5bfc6bc9-g74pz\" (UID: \"b0321825-9074-4daf-8abc-dbfd95b9a85e\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.691374 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z62nt\" (UniqueName: \"kubernetes.io/projected/41ed6e90-eff5-4e21-84c9-0503f0966c2f-kube-api-access-z62nt\") pod \"controller-manager-59cdcd9748-cftwk\" (UID: \"41ed6e90-eff5-4e21-84c9-0503f0966c2f\") " pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.786678 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:58 crc kubenswrapper[4644]: I1213 06:48:58.796914 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" Dec 13 06:48:59 crc kubenswrapper[4644]: I1213 06:48:59.134854 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-59cdcd9748-cftwk"] Dec 13 06:48:59 crc kubenswrapper[4644]: W1213 06:48:59.139178 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41ed6e90_eff5_4e21_84c9_0503f0966c2f.slice/crio-b66e29f5e5e9d0c7879b22d6cecfc15d2eaad01fb7ab5227291b6d4c32f5b371 WatchSource:0}: Error finding container b66e29f5e5e9d0c7879b22d6cecfc15d2eaad01fb7ab5227291b6d4c32f5b371: Status 404 returned error can't find the container with id b66e29f5e5e9d0c7879b22d6cecfc15d2eaad01fb7ab5227291b6d4c32f5b371 Dec 13 06:48:59 crc kubenswrapper[4644]: I1213 06:48:59.178318 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz"] Dec 13 06:48:59 crc kubenswrapper[4644]: W1213 06:48:59.187520 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0321825_9074_4daf_8abc_dbfd95b9a85e.slice/crio-c667a6bc12c693e7eaa2e40f0b583846129a89eb37449a9d03e81b9b5a72bcbc WatchSource:0}: Error finding container c667a6bc12c693e7eaa2e40f0b583846129a89eb37449a9d03e81b9b5a72bcbc: Status 404 returned error can't find the container with id c667a6bc12c693e7eaa2e40f0b583846129a89eb37449a9d03e81b9b5a72bcbc Dec 13 06:48:59 crc kubenswrapper[4644]: I1213 06:48:59.534091 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" event={"ID":"41ed6e90-eff5-4e21-84c9-0503f0966c2f","Type":"ContainerStarted","Data":"2b98720328e0fa40f762545b21506a97ce4588bc17e9bf65e764d4cecbf9c329"} Dec 13 06:48:59 crc kubenswrapper[4644]: I1213 06:48:59.534128 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" event={"ID":"41ed6e90-eff5-4e21-84c9-0503f0966c2f","Type":"ContainerStarted","Data":"b66e29f5e5e9d0c7879b22d6cecfc15d2eaad01fb7ab5227291b6d4c32f5b371"} Dec 13 06:48:59 crc kubenswrapper[4644]: I1213 06:48:59.534691 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:59 crc kubenswrapper[4644]: I1213 06:48:59.535398 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" event={"ID":"b0321825-9074-4daf-8abc-dbfd95b9a85e","Type":"ContainerStarted","Data":"b3e0994fcf29da5e912ba41277116c1c3e81de28d5c3756b309a0f67cb57f5ea"} Dec 13 06:48:59 crc kubenswrapper[4644]: I1213 06:48:59.535424 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" event={"ID":"b0321825-9074-4daf-8abc-dbfd95b9a85e","Type":"ContainerStarted","Data":"c667a6bc12c693e7eaa2e40f0b583846129a89eb37449a9d03e81b9b5a72bcbc"} Dec 13 06:48:59 crc kubenswrapper[4644]: I1213 06:48:59.535561 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" Dec 13 06:48:59 crc kubenswrapper[4644]: I1213 06:48:59.539020 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" Dec 13 06:48:59 crc kubenswrapper[4644]: I1213 06:48:59.542031 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" Dec 13 06:48:59 crc kubenswrapper[4644]: I1213 06:48:59.559876 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-59cdcd9748-cftwk" podStartSLOduration=3.559859271 podStartE2EDuration="3.559859271s" podCreationTimestamp="2025-12-13 06:48:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:48:59.55860352 +0000 UTC m=+201.773554352" watchObservedRunningTime="2025-12-13 06:48:59.559859271 +0000 UTC m=+201.774810104" Dec 13 06:48:59 crc kubenswrapper[4644]: I1213 06:48:59.587699 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5c5bfc6bc9-g74pz" podStartSLOduration=3.587683798 podStartE2EDuration="3.587683798s" podCreationTimestamp="2025-12-13 06:48:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:48:59.586476016 +0000 UTC m=+201.801426849" watchObservedRunningTime="2025-12-13 06:48:59.587683798 +0000 UTC m=+201.802634632" Dec 13 06:48:59 crc kubenswrapper[4644]: E1213 06:48:59.630485 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1e3eddf5_6aec_4c15_8ae7_a7258d60be4e.slice/crio-278812383a31cbe8c7aebda128041970bc1f9109f277592a1b1416fd27928d5e\": RecentStats: unable to find data in memory cache]" Dec 13 06:49:06 crc kubenswrapper[4644]: I1213 06:49:06.950695 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" podUID="b9059962-adc6-4278-aead-d07a310b9776" containerName="oauth-openshift" containerID="cri-o://d620f3ce6df6c6845844b641ef12589c0a90ca0dc710ed9228d92470c1bfdff6" gracePeriod=15 Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.311975 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.389078 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-ocp-branding-template\") pod \"b9059962-adc6-4278-aead-d07a310b9776\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.389138 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-provider-selection\") pod \"b9059962-adc6-4278-aead-d07a310b9776\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.389159 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-router-certs\") pod \"b9059962-adc6-4278-aead-d07a310b9776\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.389202 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-service-ca\") pod \"b9059962-adc6-4278-aead-d07a310b9776\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.389231 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-error\") pod \"b9059962-adc6-4278-aead-d07a310b9776\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.389248 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-trusted-ca-bundle\") pod \"b9059962-adc6-4278-aead-d07a310b9776\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.389867 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-audit-policies\") pod \"b9059962-adc6-4278-aead-d07a310b9776\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.389894 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-smg29\" (UniqueName: \"kubernetes.io/projected/b9059962-adc6-4278-aead-d07a310b9776-kube-api-access-smg29\") pod \"b9059962-adc6-4278-aead-d07a310b9776\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.389797 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "b9059962-adc6-4278-aead-d07a310b9776" (UID: "b9059962-adc6-4278-aead-d07a310b9776"). 
InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.389917 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-serving-cert\") pod \"b9059962-adc6-4278-aead-d07a310b9776\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.389892 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "b9059962-adc6-4278-aead-d07a310b9776" (UID: "b9059962-adc6-4278-aead-d07a310b9776"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.389935 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-login\") pod \"b9059962-adc6-4278-aead-d07a310b9776\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.389954 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9059962-adc6-4278-aead-d07a310b9776-audit-dir\") pod \"b9059962-adc6-4278-aead-d07a310b9776\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.389980 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-cliconfig\") pod \"b9059962-adc6-4278-aead-d07a310b9776\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.389998 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-session\") pod \"b9059962-adc6-4278-aead-d07a310b9776\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.390021 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-idp-0-file-data\") pod \"b9059962-adc6-4278-aead-d07a310b9776\" (UID: \"b9059962-adc6-4278-aead-d07a310b9776\") " Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.390185 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b9059962-adc6-4278-aead-d07a310b9776-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "b9059962-adc6-4278-aead-d07a310b9776" (UID: "b9059962-adc6-4278-aead-d07a310b9776"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.390212 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.390225 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.390258 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "b9059962-adc6-4278-aead-d07a310b9776" (UID: "b9059962-adc6-4278-aead-d07a310b9776"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.390695 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "b9059962-adc6-4278-aead-d07a310b9776" (UID: "b9059962-adc6-4278-aead-d07a310b9776"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.393826 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9059962-adc6-4278-aead-d07a310b9776-kube-api-access-smg29" (OuterVolumeSpecName: "kube-api-access-smg29") pod "b9059962-adc6-4278-aead-d07a310b9776" (UID: "b9059962-adc6-4278-aead-d07a310b9776"). InnerVolumeSpecName "kube-api-access-smg29". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.394527 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "b9059962-adc6-4278-aead-d07a310b9776" (UID: "b9059962-adc6-4278-aead-d07a310b9776"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.394641 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "b9059962-adc6-4278-aead-d07a310b9776" (UID: "b9059962-adc6-4278-aead-d07a310b9776"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.394796 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "b9059962-adc6-4278-aead-d07a310b9776" (UID: "b9059962-adc6-4278-aead-d07a310b9776"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.396839 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "b9059962-adc6-4278-aead-d07a310b9776" (UID: "b9059962-adc6-4278-aead-d07a310b9776"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.397020 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "b9059962-adc6-4278-aead-d07a310b9776" (UID: "b9059962-adc6-4278-aead-d07a310b9776"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.397154 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "b9059962-adc6-4278-aead-d07a310b9776" (UID: "b9059962-adc6-4278-aead-d07a310b9776"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.397427 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "b9059962-adc6-4278-aead-d07a310b9776" (UID: "b9059962-adc6-4278-aead-d07a310b9776"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.397622 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "b9059962-adc6-4278-aead-d07a310b9776" (UID: "b9059962-adc6-4278-aead-d07a310b9776"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.491917 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.491947 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.491959 4644 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9059962-adc6-4278-aead-d07a310b9776-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.491970 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.491979 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.491988 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.491998 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.492006 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.492017 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.492026 4644 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b9059962-adc6-4278-aead-d07a310b9776-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.492034 4644 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b9059962-adc6-4278-aead-d07a310b9776-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.492043 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-smg29\" (UniqueName: \"kubernetes.io/projected/b9059962-adc6-4278-aead-d07a310b9776-kube-api-access-smg29\") on 
node \"crc\" DevicePath \"\"" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.572977 4644 generic.go:334] "Generic (PLEG): container finished" podID="b9059962-adc6-4278-aead-d07a310b9776" containerID="d620f3ce6df6c6845844b641ef12589c0a90ca0dc710ed9228d92470c1bfdff6" exitCode=0 Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.573024 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" event={"ID":"b9059962-adc6-4278-aead-d07a310b9776","Type":"ContainerDied","Data":"d620f3ce6df6c6845844b641ef12589c0a90ca0dc710ed9228d92470c1bfdff6"} Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.573056 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" event={"ID":"b9059962-adc6-4278-aead-d07a310b9776","Type":"ContainerDied","Data":"0e360132f9528575592219d90a1f6581252654079c386505aeb050e6c1209234"} Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.573076 4644 scope.go:117] "RemoveContainer" containerID="d620f3ce6df6c6845844b641ef12589c0a90ca0dc710ed9228d92470c1bfdff6" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.573671 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-4z6jl" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.590785 4644 scope.go:117] "RemoveContainer" containerID="d620f3ce6df6c6845844b641ef12589c0a90ca0dc710ed9228d92470c1bfdff6" Dec 13 06:49:07 crc kubenswrapper[4644]: E1213 06:49:07.593258 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d620f3ce6df6c6845844b641ef12589c0a90ca0dc710ed9228d92470c1bfdff6\": container with ID starting with d620f3ce6df6c6845844b641ef12589c0a90ca0dc710ed9228d92470c1bfdff6 not found: ID does not exist" containerID="d620f3ce6df6c6845844b641ef12589c0a90ca0dc710ed9228d92470c1bfdff6" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.593299 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d620f3ce6df6c6845844b641ef12589c0a90ca0dc710ed9228d92470c1bfdff6"} err="failed to get container status \"d620f3ce6df6c6845844b641ef12589c0a90ca0dc710ed9228d92470c1bfdff6\": rpc error: code = NotFound desc = could not find container \"d620f3ce6df6c6845844b641ef12589c0a90ca0dc710ed9228d92470c1bfdff6\": container with ID starting with d620f3ce6df6c6845844b641ef12589c0a90ca0dc710ed9228d92470c1bfdff6 not found: ID does not exist" Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.598211 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-4z6jl"] Dec 13 06:49:07 crc kubenswrapper[4644]: I1213 06:49:07.600138 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-4z6jl"] Dec 13 06:49:08 crc kubenswrapper[4644]: I1213 06:49:08.393979 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9059962-adc6-4278-aead-d07a310b9776" path="/var/lib/kubelet/pods/b9059962-adc6-4278-aead-d07a310b9776/volumes" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.482082 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6775b6d8cc-b497b"] Dec 13 06:49:09 crc kubenswrapper[4644]: E1213 06:49:09.482338 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9059962-adc6-4278-aead-d07a310b9776" 
containerName="oauth-openshift" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.482354 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9059962-adc6-4278-aead-d07a310b9776" containerName="oauth-openshift" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.482505 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9059962-adc6-4278-aead-d07a310b9776" containerName="oauth-openshift" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.482926 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.485317 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.485361 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.485456 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.485566 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.485608 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.487270 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.487340 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.487361 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.487895 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.487969 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.487982 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.487992 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.493225 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6775b6d8cc-b497b"] Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.496362 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.498334 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.507749 4644 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.618894 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-user-template-login\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.619308 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.619467 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c295e520-7e71-4b39-b79c-e291f7e63de5-audit-policies\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.619586 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-service-ca\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.619701 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c295e520-7e71-4b39-b79c-e291f7e63de5-audit-dir\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.619787 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-router-certs\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.619878 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-session\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.619967 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.620220 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.620279 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2l262\" (UniqueName: \"kubernetes.io/projected/c295e520-7e71-4b39-b79c-e291f7e63de5-kube-api-access-2l262\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.620358 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.620476 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-user-template-error\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.620506 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.620537 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.721476 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc 
kubenswrapper[4644]: I1213 06:49:09.721538 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2l262\" (UniqueName: \"kubernetes.io/projected/c295e520-7e71-4b39-b79c-e291f7e63de5-kube-api-access-2l262\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.721607 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.721655 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-user-template-error\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.721679 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.721701 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.721726 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-user-template-login\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.721759 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.721785 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c295e520-7e71-4b39-b79c-e291f7e63de5-audit-policies\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: 
I1213 06:49:09.721803 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-service-ca\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.721848 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c295e520-7e71-4b39-b79c-e291f7e63de5-audit-dir\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.721867 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-session\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.721886 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-router-certs\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.721909 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.722694 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-service-ca\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.723215 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c295e520-7e71-4b39-b79c-e291f7e63de5-audit-policies\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.723292 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c295e520-7e71-4b39-b79c-e291f7e63de5-audit-dir\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.723226 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.723654 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.726927 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.726947 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-router-certs\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.727070 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-user-template-login\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.727763 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.728020 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.728895 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.729284 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-user-template-error\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.729399 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c295e520-7e71-4b39-b79c-e291f7e63de5-v4-0-config-system-session\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.739157 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2l262\" (UniqueName: \"kubernetes.io/projected/c295e520-7e71-4b39-b79c-e291f7e63de5-kube-api-access-2l262\") pod \"oauth-openshift-6775b6d8cc-b497b\" (UID: \"c295e520-7e71-4b39-b79c-e291f7e63de5\") " pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:09 crc kubenswrapper[4644]: E1213 06:49:09.740270 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1e3eddf5_6aec_4c15_8ae7_a7258d60be4e.slice/crio-278812383a31cbe8c7aebda128041970bc1f9109f277592a1b1416fd27928d5e\": RecentStats: unable to find data in memory cache]" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.753838 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.753886 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.753923 4644 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.754558 4644 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6"} pod="openshift-machine-config-operator/machine-config-daemon-45tj4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.754615 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" containerID="cri-o://cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6" gracePeriod=600 Dec 13 06:49:09 crc kubenswrapper[4644]: I1213 06:49:09.796068 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:10 crc kubenswrapper[4644]: I1213 06:49:10.145367 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6775b6d8cc-b497b"] Dec 13 06:49:10 crc kubenswrapper[4644]: W1213 06:49:10.151591 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc295e520_7e71_4b39_b79c_e291f7e63de5.slice/crio-321648f20bed272ee7e8c2cd016f75c47a65cb76e2e69d6a5934f444a3d0b295 WatchSource:0}: Error finding container 321648f20bed272ee7e8c2cd016f75c47a65cb76e2e69d6a5934f444a3d0b295: Status 404 returned error can't find the container with id 321648f20bed272ee7e8c2cd016f75c47a65cb76e2e69d6a5934f444a3d0b295 Dec 13 06:49:10 crc kubenswrapper[4644]: I1213 06:49:10.588862 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" event={"ID":"c295e520-7e71-4b39-b79c-e291f7e63de5","Type":"ContainerStarted","Data":"06ef6bb6e481daeefec85c3e2c6d615f91a57dfd696cd906b5c4ba63de37be18"} Dec 13 06:49:10 crc kubenswrapper[4644]: I1213 06:49:10.589093 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" event={"ID":"c295e520-7e71-4b39-b79c-e291f7e63de5","Type":"ContainerStarted","Data":"321648f20bed272ee7e8c2cd016f75c47a65cb76e2e69d6a5934f444a3d0b295"} Dec 13 06:49:10 crc kubenswrapper[4644]: I1213 06:49:10.589114 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:10 crc kubenswrapper[4644]: I1213 06:49:10.593014 4644 generic.go:334] "Generic (PLEG): container finished" podID="48240f19-087e-4597-b448-ab1a190a5027" containerID="cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6" exitCode=0 Dec 13 06:49:10 crc kubenswrapper[4644]: I1213 06:49:10.593052 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerDied","Data":"cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6"} Dec 13 06:49:10 crc kubenswrapper[4644]: I1213 06:49:10.593079 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerStarted","Data":"940bbee9b20bdd362b830e1ec88ca33332cf28bd753c6bedbbb57aa771fa906d"} Dec 13 06:49:10 crc kubenswrapper[4644]: I1213 06:49:10.595026 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" Dec 13 06:49:10 crc kubenswrapper[4644]: I1213 06:49:10.607426 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6775b6d8cc-b497b" podStartSLOduration=29.607408715 podStartE2EDuration="29.607408715s" podCreationTimestamp="2025-12-13 06:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:49:10.604405826 +0000 UTC m=+212.819356659" watchObservedRunningTime="2025-12-13 06:49:10.607408715 +0000 UTC m=+212.822359548" Dec 13 06:49:19 crc kubenswrapper[4644]: E1213 06:49:19.833130 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" 
err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1e3eddf5_6aec_4c15_8ae7_a7258d60be4e.slice/crio-278812383a31cbe8c7aebda128041970bc1f9109f277592a1b1416fd27928d5e\": RecentStats: unable to find data in memory cache]" Dec 13 06:49:21 crc kubenswrapper[4644]: I1213 06:49:21.984154 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-54mm5"] Dec 13 06:49:21 crc kubenswrapper[4644]: I1213 06:49:21.984719 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-54mm5" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" containerName="registry-server" containerID="cri-o://1fe23cd40df07897833dbd638f5359afa9604c782334dfcb9c06b9600ef516d9" gracePeriod=30 Dec 13 06:49:21 crc kubenswrapper[4644]: I1213 06:49:21.989300 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2bmvc"] Dec 13 06:49:21 crc kubenswrapper[4644]: I1213 06:49:21.989572 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2bmvc" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" containerName="registry-server" containerID="cri-o://53d9e38c4aa5494a6091b4cf1765c744f439df0001d5243d593272ec35ef5f3d" gracePeriod=30 Dec 13 06:49:21 crc kubenswrapper[4644]: I1213 06:49:21.997335 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-crmqj"] Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:21.998642 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" containerName="marketplace-operator" containerID="cri-o://15fa61a9e529b0e0eca5efafb4d687d9e4b08ddb595b16a2abdea0c61eff7963" gracePeriod=30 Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.002797 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pngbm"] Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.003052 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pngbm" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" containerName="registry-server" containerID="cri-o://5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420" gracePeriod=30 Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.017544 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gb9x7"] Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.018255 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.019159 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gqhfh"] Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.019359 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gqhfh" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" containerName="registry-server" containerID="cri-o://3a5ec60ffb3914f4bef7e7105fd5b5ad42f867ba66e2f5ec1dbe5b8dcd47f92f" gracePeriod=30 Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.028538 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gb9x7"] Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.172334 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/33f05cad-7917-4d9a-870d-b68d4388bbde-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gb9x7\" (UID: \"33f05cad-7917-4d9a-870d-b68d4388bbde\") " pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.172675 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vs22s\" (UniqueName: \"kubernetes.io/projected/33f05cad-7917-4d9a-870d-b68d4388bbde-kube-api-access-vs22s\") pod \"marketplace-operator-79b997595-gb9x7\" (UID: \"33f05cad-7917-4d9a-870d-b68d4388bbde\") " pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.173070 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/33f05cad-7917-4d9a-870d-b68d4388bbde-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gb9x7\" (UID: \"33f05cad-7917-4d9a-870d-b68d4388bbde\") " pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.274769 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/33f05cad-7917-4d9a-870d-b68d4388bbde-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gb9x7\" (UID: \"33f05cad-7917-4d9a-870d-b68d4388bbde\") " pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.274884 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/33f05cad-7917-4d9a-870d-b68d4388bbde-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gb9x7\" (UID: \"33f05cad-7917-4d9a-870d-b68d4388bbde\") " pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.274926 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vs22s\" (UniqueName: \"kubernetes.io/projected/33f05cad-7917-4d9a-870d-b68d4388bbde-kube-api-access-vs22s\") pod \"marketplace-operator-79b997595-gb9x7\" (UID: \"33f05cad-7917-4d9a-870d-b68d4388bbde\") " pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.275922 4644 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/33f05cad-7917-4d9a-870d-b68d4388bbde-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gb9x7\" (UID: \"33f05cad-7917-4d9a-870d-b68d4388bbde\") " pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.280188 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/33f05cad-7917-4d9a-870d-b68d4388bbde-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gb9x7\" (UID: \"33f05cad-7917-4d9a-870d-b68d4388bbde\") " pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.293132 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vs22s\" (UniqueName: \"kubernetes.io/projected/33f05cad-7917-4d9a-870d-b68d4388bbde-kube-api-access-vs22s\") pod \"marketplace-operator-79b997595-gb9x7\" (UID: \"33f05cad-7917-4d9a-870d-b68d4388bbde\") " pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.307081 4644 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420 is running failed: container process not found" containerID="5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420" cmd=["grpc_health_probe","-addr=:50051"] Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.307479 4644 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420 is running failed: container process not found" containerID="5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420" cmd=["grpc_health_probe","-addr=:50051"] Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.307780 4644 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420 is running failed: container process not found" containerID="5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420" cmd=["grpc_health_probe","-addr=:50051"] Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.307823 4644 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-pngbm" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" containerName="registry-server" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.330915 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.460655 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2bmvc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.545768 4644 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.546222 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" containerName="registry-server" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.546240 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" containerName="registry-server" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.546252 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" containerName="extract-utilities" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.546258 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" containerName="extract-utilities" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.546273 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" containerName="extract-content" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.546279 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" containerName="extract-content" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.546369 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" containerName="registry-server" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.546679 4644 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.546708 4644 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.546805 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.546819 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.546833 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.546840 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.546846 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.546852 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.546861 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.546867 4644 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.546873 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.546879 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.546888 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.546893 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.546901 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.546906 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.546981 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.546988 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.546995 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.547006 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.547014 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.546869 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.547092 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844" gracePeriod=15 Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.547498 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.548015 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7" gracePeriod=15 Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.548303 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638" gracePeriod=15 Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.548358 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2" gracePeriod=15 Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.548391 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334" gracePeriod=15 Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.550846 4644 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.579289 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b355a1b-28e0-462c-a1ef-43eea6341565-catalog-content\") pod \"4b355a1b-28e0-462c-a1ef-43eea6341565\" (UID: \"4b355a1b-28e0-462c-a1ef-43eea6341565\") " Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.579335 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b355a1b-28e0-462c-a1ef-43eea6341565-utilities\") pod \"4b355a1b-28e0-462c-a1ef-43eea6341565\" (UID: \"4b355a1b-28e0-462c-a1ef-43eea6341565\") " Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.579392 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5cjq\" (UniqueName: \"kubernetes.io/projected/4b355a1b-28e0-462c-a1ef-43eea6341565-kube-api-access-x5cjq\") pod \"4b355a1b-28e0-462c-a1ef-43eea6341565\" (UID: \"4b355a1b-28e0-462c-a1ef-43eea6341565\") " Dec 
13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.580551 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b355a1b-28e0-462c-a1ef-43eea6341565-utilities" (OuterVolumeSpecName: "utilities") pod "4b355a1b-28e0-462c-a1ef-43eea6341565" (UID: "4b355a1b-28e0-462c-a1ef-43eea6341565"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.584886 4644 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 192.168.25.89:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.597803 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b355a1b-28e0-462c-a1ef-43eea6341565-kube-api-access-x5cjq" (OuterVolumeSpecName: "kube-api-access-x5cjq") pod "4b355a1b-28e0-462c-a1ef-43eea6341565" (UID: "4b355a1b-28e0-462c-a1ef-43eea6341565"). InnerVolumeSpecName "kube-api-access-x5cjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.626593 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gqhfh" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.627189 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.630775 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-54mm5" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.631163 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.631546 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.633603 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.633900 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.634232 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.634907 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.638962 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pngbm" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.639283 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.639338 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b355a1b-28e0-462c-a1ef-43eea6341565-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4b355a1b-28e0-462c-a1ef-43eea6341565" (UID: "4b355a1b-28e0-462c-a1ef-43eea6341565"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.639826 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.640844 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.641099 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.648324 4644 generic.go:334] "Generic (PLEG): container finished" podID="10291551-2baf-4271-bc49-6a40e5ceb94b" containerID="1fe23cd40df07897833dbd638f5359afa9604c782334dfcb9c06b9600ef516d9" exitCode=0 Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.648360 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-54mm5" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.648384 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-54mm5" event={"ID":"10291551-2baf-4271-bc49-6a40e5ceb94b","Type":"ContainerDied","Data":"1fe23cd40df07897833dbd638f5359afa9604c782334dfcb9c06b9600ef516d9"} Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.648428 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-54mm5" event={"ID":"10291551-2baf-4271-bc49-6a40e5ceb94b","Type":"ContainerDied","Data":"cf7fec9b8e68918e062be8ca425fd99a36eba9b696859bdcdf2a74796cb14711"} Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.648460 4644 scope.go:117] "RemoveContainer" containerID="1fe23cd40df07897833dbd638f5359afa9604c782334dfcb9c06b9600ef516d9" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.649006 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.649296 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.649520 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" 
pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.649698 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.651501 4644 generic.go:334] "Generic (PLEG): container finished" podID="8b4d6920-0938-4fcf-a825-bfaec69da684" containerID="5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420" exitCode=0 Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.651584 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pngbm" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.651752 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pngbm" event={"ID":"8b4d6920-0938-4fcf-a825-bfaec69da684","Type":"ContainerDied","Data":"5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420"} Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.652034 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.652421 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pngbm" event={"ID":"8b4d6920-0938-4fcf-a825-bfaec69da684","Type":"ContainerDied","Data":"02b66bf716c03b13322f150885ef0304fbce941da96d17de86526d1c32be4993"} Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.652930 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.653139 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.654037 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.655452 4644 generic.go:334] "Generic (PLEG): container finished" podID="4b355a1b-28e0-462c-a1ef-43eea6341565" 
containerID="53d9e38c4aa5494a6091b4cf1765c744f439df0001d5243d593272ec35ef5f3d" exitCode=0 Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.655536 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2bmvc" event={"ID":"4b355a1b-28e0-462c-a1ef-43eea6341565","Type":"ContainerDied","Data":"53d9e38c4aa5494a6091b4cf1765c744f439df0001d5243d593272ec35ef5f3d"} Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.655574 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2bmvc" event={"ID":"4b355a1b-28e0-462c-a1ef-43eea6341565","Type":"ContainerDied","Data":"3f13444f58d076d4fa403284d876796fd46ed75cedf126c2008cc73f53a7a35f"} Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.655645 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2bmvc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.661337 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.662008 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.662390 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.662843 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.663065 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.664052 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gqhfh" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.664080 4644 generic.go:334] "Generic (PLEG): container finished" podID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" containerID="3a5ec60ffb3914f4bef7e7105fd5b5ad42f867ba66e2f5ec1dbe5b8dcd47f92f" exitCode=0 Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.664146 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqhfh" event={"ID":"e9452a17-eda0-4e66-bf4f-30e6e8ac8693","Type":"ContainerDied","Data":"3a5ec60ffb3914f4bef7e7105fd5b5ad42f867ba66e2f5ec1dbe5b8dcd47f92f"} Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.664213 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gqhfh" event={"ID":"e9452a17-eda0-4e66-bf4f-30e6e8ac8693","Type":"ContainerDied","Data":"cf32b06cd1f23c88232e910e8ed7ec37e1de7d39efdf995e7bc7315ff993d4fe"} Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.664553 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.664763 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.664993 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.665257 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.665526 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.666677 4644 generic.go:334] "Generic (PLEG): container finished" podID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" containerID="15fa61a9e529b0e0eca5efafb4d687d9e4b08ddb595b16a2abdea0c61eff7963" exitCode=0 Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.666748 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" 
event={"ID":"5de2f44b-564a-461c-b9e1-b4b306d8ecb1","Type":"ContainerDied","Data":"15fa61a9e529b0e0eca5efafb4d687d9e4b08ddb595b16a2abdea0c61eff7963"} Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.666775 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" event={"ID":"5de2f44b-564a-461c-b9e1-b4b306d8ecb1","Type":"ContainerDied","Data":"0382c6b7e3c2c56dcdebc42e8370caaae8c7a207413272139b4465a0dbef8cf1"} Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.666776 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.667209 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.667530 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.667825 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.668028 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.668181 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.669585 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.669773 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: 
I1213 06:49:22.669940 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.670088 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.670239 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.673994 4644 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.674430 4644 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.674741 4644 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.675110 4644 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.675491 4644 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.675514 4644 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.675740 4644 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="200ms" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.682717 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.682901 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.683011 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.683087 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.683109 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.683164 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.683307 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.683459 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.683644 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b355a1b-28e0-462c-a1ef-43eea6341565-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.683674 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b355a1b-28e0-462c-a1ef-43eea6341565-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.683706 4644 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-x5cjq\" (UniqueName: \"kubernetes.io/projected/4b355a1b-28e0-462c-a1ef-43eea6341565-kube-api-access-x5cjq\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.710272 4644 scope.go:117] "RemoveContainer" containerID="bd5c77c633c156461e97f0dcacc96167db72c22772078c7f793efd58c44b80c0" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.725948 4644 scope.go:117] "RemoveContainer" containerID="1f91ca4c5f67f02fc7e4530efcbcbcd6aa336b3c38a4008204ff8a4118d8b746" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.739568 4644 scope.go:117] "RemoveContainer" containerID="1fe23cd40df07897833dbd638f5359afa9604c782334dfcb9c06b9600ef516d9" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.739899 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fe23cd40df07897833dbd638f5359afa9604c782334dfcb9c06b9600ef516d9\": container with ID starting with 1fe23cd40df07897833dbd638f5359afa9604c782334dfcb9c06b9600ef516d9 not found: ID does not exist" containerID="1fe23cd40df07897833dbd638f5359afa9604c782334dfcb9c06b9600ef516d9" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.739929 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fe23cd40df07897833dbd638f5359afa9604c782334dfcb9c06b9600ef516d9"} err="failed to get container status \"1fe23cd40df07897833dbd638f5359afa9604c782334dfcb9c06b9600ef516d9\": rpc error: code = NotFound desc = could not find container \"1fe23cd40df07897833dbd638f5359afa9604c782334dfcb9c06b9600ef516d9\": container with ID starting with 1fe23cd40df07897833dbd638f5359afa9604c782334dfcb9c06b9600ef516d9 not found: ID does not exist" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.739950 4644 scope.go:117] "RemoveContainer" containerID="bd5c77c633c156461e97f0dcacc96167db72c22772078c7f793efd58c44b80c0" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.740465 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd5c77c633c156461e97f0dcacc96167db72c22772078c7f793efd58c44b80c0\": container with ID starting with bd5c77c633c156461e97f0dcacc96167db72c22772078c7f793efd58c44b80c0 not found: ID does not exist" containerID="bd5c77c633c156461e97f0dcacc96167db72c22772078c7f793efd58c44b80c0" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.740488 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd5c77c633c156461e97f0dcacc96167db72c22772078c7f793efd58c44b80c0"} err="failed to get container status \"bd5c77c633c156461e97f0dcacc96167db72c22772078c7f793efd58c44b80c0\": rpc error: code = NotFound desc = could not find container \"bd5c77c633c156461e97f0dcacc96167db72c22772078c7f793efd58c44b80c0\": container with ID starting with bd5c77c633c156461e97f0dcacc96167db72c22772078c7f793efd58c44b80c0 not found: ID does not exist" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.740502 4644 scope.go:117] "RemoveContainer" containerID="1f91ca4c5f67f02fc7e4530efcbcbcd6aa336b3c38a4008204ff8a4118d8b746" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.740936 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f91ca4c5f67f02fc7e4530efcbcbcd6aa336b3c38a4008204ff8a4118d8b746\": container with ID starting with 1f91ca4c5f67f02fc7e4530efcbcbcd6aa336b3c38a4008204ff8a4118d8b746 not found: ID does 
not exist" containerID="1f91ca4c5f67f02fc7e4530efcbcbcd6aa336b3c38a4008204ff8a4118d8b746" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.740984 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f91ca4c5f67f02fc7e4530efcbcbcd6aa336b3c38a4008204ff8a4118d8b746"} err="failed to get container status \"1f91ca4c5f67f02fc7e4530efcbcbcd6aa336b3c38a4008204ff8a4118d8b746\": rpc error: code = NotFound desc = could not find container \"1f91ca4c5f67f02fc7e4530efcbcbcd6aa336b3c38a4008204ff8a4118d8b746\": container with ID starting with 1f91ca4c5f67f02fc7e4530efcbcbcd6aa336b3c38a4008204ff8a4118d8b746 not found: ID does not exist" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.741021 4644 scope.go:117] "RemoveContainer" containerID="5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.755656 4644 scope.go:117] "RemoveContainer" containerID="f83496b87e599da0720cd2d8324f929605305836a85b1bb5d08acce122934647" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.784644 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-utilities\") pod \"e9452a17-eda0-4e66-bf4f-30e6e8ac8693\" (UID: \"e9452a17-eda0-4e66-bf4f-30e6e8ac8693\") " Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.784708 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10291551-2baf-4271-bc49-6a40e5ceb94b-utilities\") pod \"10291551-2baf-4271-bc49-6a40e5ceb94b\" (UID: \"10291551-2baf-4271-bc49-6a40e5ceb94b\") " Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.784742 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxm94\" (UniqueName: \"kubernetes.io/projected/10291551-2baf-4271-bc49-6a40e5ceb94b-kube-api-access-hxm94\") pod \"10291551-2baf-4271-bc49-6a40e5ceb94b\" (UID: \"10291551-2baf-4271-bc49-6a40e5ceb94b\") " Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.784761 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-catalog-content\") pod \"e9452a17-eda0-4e66-bf4f-30e6e8ac8693\" (UID: \"e9452a17-eda0-4e66-bf4f-30e6e8ac8693\") " Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.784794 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-marketplace-operator-metrics\") pod \"5de2f44b-564a-461c-b9e1-b4b306d8ecb1\" (UID: \"5de2f44b-564a-461c-b9e1-b4b306d8ecb1\") " Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.784831 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vzgms\" (UniqueName: \"kubernetes.io/projected/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-kube-api-access-vzgms\") pod \"5de2f44b-564a-461c-b9e1-b4b306d8ecb1\" (UID: \"5de2f44b-564a-461c-b9e1-b4b306d8ecb1\") " Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.784852 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b4d6920-0938-4fcf-a825-bfaec69da684-catalog-content\") pod \"8b4d6920-0938-4fcf-a825-bfaec69da684\" (UID: \"8b4d6920-0938-4fcf-a825-bfaec69da684\") 
" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.784896 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10291551-2baf-4271-bc49-6a40e5ceb94b-catalog-content\") pod \"10291551-2baf-4271-bc49-6a40e5ceb94b\" (UID: \"10291551-2baf-4271-bc49-6a40e5ceb94b\") " Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.784917 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnv5r\" (UniqueName: \"kubernetes.io/projected/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-kube-api-access-hnv5r\") pod \"e9452a17-eda0-4e66-bf4f-30e6e8ac8693\" (UID: \"e9452a17-eda0-4e66-bf4f-30e6e8ac8693\") " Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.784954 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-marketplace-trusted-ca\") pod \"5de2f44b-564a-461c-b9e1-b4b306d8ecb1\" (UID: \"5de2f44b-564a-461c-b9e1-b4b306d8ecb1\") " Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.784979 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b4d6920-0938-4fcf-a825-bfaec69da684-utilities\") pod \"8b4d6920-0938-4fcf-a825-bfaec69da684\" (UID: \"8b4d6920-0938-4fcf-a825-bfaec69da684\") " Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.785009 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8km7q\" (UniqueName: \"kubernetes.io/projected/8b4d6920-0938-4fcf-a825-bfaec69da684-kube-api-access-8km7q\") pod \"8b4d6920-0938-4fcf-a825-bfaec69da684\" (UID: \"8b4d6920-0938-4fcf-a825-bfaec69da684\") " Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.785142 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.785181 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.785208 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.785221 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.785242 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.785266 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.785284 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.785330 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.785422 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.785477 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.785471 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.785500 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.785478 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.785542 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod 
\"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.785567 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.786010 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "5de2f44b-564a-461c-b9e1-b4b306d8ecb1" (UID: "5de2f44b-564a-461c-b9e1-b4b306d8ecb1"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.786051 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.786288 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b4d6920-0938-4fcf-a825-bfaec69da684-utilities" (OuterVolumeSpecName: "utilities") pod "8b4d6920-0938-4fcf-a825-bfaec69da684" (UID: "8b4d6920-0938-4fcf-a825-bfaec69da684"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.786630 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-utilities" (OuterVolumeSpecName: "utilities") pod "e9452a17-eda0-4e66-bf4f-30e6e8ac8693" (UID: "e9452a17-eda0-4e66-bf4f-30e6e8ac8693"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.787428 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10291551-2baf-4271-bc49-6a40e5ceb94b-kube-api-access-hxm94" (OuterVolumeSpecName: "kube-api-access-hxm94") pod "10291551-2baf-4271-bc49-6a40e5ceb94b" (UID: "10291551-2baf-4271-bc49-6a40e5ceb94b"). InnerVolumeSpecName "kube-api-access-hxm94". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.788079 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b4d6920-0938-4fcf-a825-bfaec69da684-kube-api-access-8km7q" (OuterVolumeSpecName: "kube-api-access-8km7q") pod "8b4d6920-0938-4fcf-a825-bfaec69da684" (UID: "8b4d6920-0938-4fcf-a825-bfaec69da684"). InnerVolumeSpecName "kube-api-access-8km7q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.788264 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-kube-api-access-hnv5r" (OuterVolumeSpecName: "kube-api-access-hnv5r") pod "e9452a17-eda0-4e66-bf4f-30e6e8ac8693" (UID: "e9452a17-eda0-4e66-bf4f-30e6e8ac8693"). InnerVolumeSpecName "kube-api-access-hnv5r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.788319 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-kube-api-access-vzgms" (OuterVolumeSpecName: "kube-api-access-vzgms") pod "5de2f44b-564a-461c-b9e1-b4b306d8ecb1" (UID: "5de2f44b-564a-461c-b9e1-b4b306d8ecb1"). InnerVolumeSpecName "kube-api-access-vzgms". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.788666 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "5de2f44b-564a-461c-b9e1-b4b306d8ecb1" (UID: "5de2f44b-564a-461c-b9e1-b4b306d8ecb1"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.788911 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10291551-2baf-4271-bc49-6a40e5ceb94b-utilities" (OuterVolumeSpecName: "utilities") pod "10291551-2baf-4271-bc49-6a40e5ceb94b" (UID: "10291551-2baf-4271-bc49-6a40e5ceb94b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.803764 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b4d6920-0938-4fcf-a825-bfaec69da684-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8b4d6920-0938-4fcf-a825-bfaec69da684" (UID: "8b4d6920-0938-4fcf-a825-bfaec69da684"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.807943 4644 scope.go:117] "RemoveContainer" containerID="904bfa839cfea4be09a34ba4b806354a28a3129fb40c96f23d3f17375bfb659c" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.819609 4644 scope.go:117] "RemoveContainer" containerID="5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.820711 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420\": container with ID starting with 5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420 not found: ID does not exist" containerID="5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.820754 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420"} err="failed to get container status \"5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420\": rpc error: code = NotFound desc = could not find container \"5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420\": container with ID starting with 5809b6a284b87759519fd5e20ffc94636fdf6f17b4645ece4d5925f7a37b5420 not found: ID does not exist" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.820788 4644 scope.go:117] "RemoveContainer" containerID="f83496b87e599da0720cd2d8324f929605305836a85b1bb5d08acce122934647" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.821281 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f83496b87e599da0720cd2d8324f929605305836a85b1bb5d08acce122934647\": container with ID starting with f83496b87e599da0720cd2d8324f929605305836a85b1bb5d08acce122934647 not found: ID does not exist" containerID="f83496b87e599da0720cd2d8324f929605305836a85b1bb5d08acce122934647" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.821330 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f83496b87e599da0720cd2d8324f929605305836a85b1bb5d08acce122934647"} err="failed to get container status \"f83496b87e599da0720cd2d8324f929605305836a85b1bb5d08acce122934647\": rpc error: code = NotFound desc = could not find container \"f83496b87e599da0720cd2d8324f929605305836a85b1bb5d08acce122934647\": container with ID starting with f83496b87e599da0720cd2d8324f929605305836a85b1bb5d08acce122934647 not found: ID does not exist" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.821358 4644 scope.go:117] "RemoveContainer" containerID="904bfa839cfea4be09a34ba4b806354a28a3129fb40c96f23d3f17375bfb659c" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.821680 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"904bfa839cfea4be09a34ba4b806354a28a3129fb40c96f23d3f17375bfb659c\": container with ID starting with 904bfa839cfea4be09a34ba4b806354a28a3129fb40c96f23d3f17375bfb659c not found: ID does not exist" containerID="904bfa839cfea4be09a34ba4b806354a28a3129fb40c96f23d3f17375bfb659c" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.821709 4644 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"904bfa839cfea4be09a34ba4b806354a28a3129fb40c96f23d3f17375bfb659c"} err="failed to get container status \"904bfa839cfea4be09a34ba4b806354a28a3129fb40c96f23d3f17375bfb659c\": rpc error: code = NotFound desc = could not find container \"904bfa839cfea4be09a34ba4b806354a28a3129fb40c96f23d3f17375bfb659c\": container with ID starting with 904bfa839cfea4be09a34ba4b806354a28a3129fb40c96f23d3f17375bfb659c not found: ID does not exist" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.821726 4644 scope.go:117] "RemoveContainer" containerID="53d9e38c4aa5494a6091b4cf1765c744f439df0001d5243d593272ec35ef5f3d" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.834236 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10291551-2baf-4271-bc49-6a40e5ceb94b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "10291551-2baf-4271-bc49-6a40e5ceb94b" (UID: "10291551-2baf-4271-bc49-6a40e5ceb94b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.834635 4644 scope.go:117] "RemoveContainer" containerID="d3ac3afb692ecd1394722bb0326bc5075372023eb4cc09244bd228e403fbe225" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.847018 4644 scope.go:117] "RemoveContainer" containerID="f85adda5ac263603f5f8f57c36f1ae3a7d878908d1e8ab35b766e5b6df80d534" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.865070 4644 scope.go:117] "RemoveContainer" containerID="53d9e38c4aa5494a6091b4cf1765c744f439df0001d5243d593272ec35ef5f3d" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.865481 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53d9e38c4aa5494a6091b4cf1765c744f439df0001d5243d593272ec35ef5f3d\": container with ID starting with 53d9e38c4aa5494a6091b4cf1765c744f439df0001d5243d593272ec35ef5f3d not found: ID does not exist" containerID="53d9e38c4aa5494a6091b4cf1765c744f439df0001d5243d593272ec35ef5f3d" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.865516 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53d9e38c4aa5494a6091b4cf1765c744f439df0001d5243d593272ec35ef5f3d"} err="failed to get container status \"53d9e38c4aa5494a6091b4cf1765c744f439df0001d5243d593272ec35ef5f3d\": rpc error: code = NotFound desc = could not find container \"53d9e38c4aa5494a6091b4cf1765c744f439df0001d5243d593272ec35ef5f3d\": container with ID starting with 53d9e38c4aa5494a6091b4cf1765c744f439df0001d5243d593272ec35ef5f3d not found: ID does not exist" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.865541 4644 scope.go:117] "RemoveContainer" containerID="d3ac3afb692ecd1394722bb0326bc5075372023eb4cc09244bd228e403fbe225" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.865744 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3ac3afb692ecd1394722bb0326bc5075372023eb4cc09244bd228e403fbe225\": container with ID starting with d3ac3afb692ecd1394722bb0326bc5075372023eb4cc09244bd228e403fbe225 not found: ID does not exist" containerID="d3ac3afb692ecd1394722bb0326bc5075372023eb4cc09244bd228e403fbe225" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.865770 4644 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d3ac3afb692ecd1394722bb0326bc5075372023eb4cc09244bd228e403fbe225"} err="failed to get container status \"d3ac3afb692ecd1394722bb0326bc5075372023eb4cc09244bd228e403fbe225\": rpc error: code = NotFound desc = could not find container \"d3ac3afb692ecd1394722bb0326bc5075372023eb4cc09244bd228e403fbe225\": container with ID starting with d3ac3afb692ecd1394722bb0326bc5075372023eb4cc09244bd228e403fbe225 not found: ID does not exist" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.865787 4644 scope.go:117] "RemoveContainer" containerID="f85adda5ac263603f5f8f57c36f1ae3a7d878908d1e8ab35b766e5b6df80d534" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.866050 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f85adda5ac263603f5f8f57c36f1ae3a7d878908d1e8ab35b766e5b6df80d534\": container with ID starting with f85adda5ac263603f5f8f57c36f1ae3a7d878908d1e8ab35b766e5b6df80d534 not found: ID does not exist" containerID="f85adda5ac263603f5f8f57c36f1ae3a7d878908d1e8ab35b766e5b6df80d534" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.866077 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f85adda5ac263603f5f8f57c36f1ae3a7d878908d1e8ab35b766e5b6df80d534"} err="failed to get container status \"f85adda5ac263603f5f8f57c36f1ae3a7d878908d1e8ab35b766e5b6df80d534\": rpc error: code = NotFound desc = could not find container \"f85adda5ac263603f5f8f57c36f1ae3a7d878908d1e8ab35b766e5b6df80d534\": container with ID starting with f85adda5ac263603f5f8f57c36f1ae3a7d878908d1e8ab35b766e5b6df80d534 not found: ID does not exist" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.866103 4644 scope.go:117] "RemoveContainer" containerID="3a5ec60ffb3914f4bef7e7105fd5b5ad42f867ba66e2f5ec1dbe5b8dcd47f92f" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.876830 4644 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="400ms" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.879542 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e9452a17-eda0-4e66-bf4f-30e6e8ac8693" (UID: "e9452a17-eda0-4e66-bf4f-30e6e8ac8693"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.887109 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzgms\" (UniqueName: \"kubernetes.io/projected/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-kube-api-access-vzgms\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.887141 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b4d6920-0938-4fcf-a825-bfaec69da684-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.887158 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10291551-2baf-4271-bc49-6a40e5ceb94b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.887168 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnv5r\" (UniqueName: \"kubernetes.io/projected/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-kube-api-access-hnv5r\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.887178 4644 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.887188 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b4d6920-0938-4fcf-a825-bfaec69da684-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.887201 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8km7q\" (UniqueName: \"kubernetes.io/projected/8b4d6920-0938-4fcf-a825-bfaec69da684-kube-api-access-8km7q\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.887210 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.887218 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10291551-2baf-4271-bc49-6a40e5ceb94b-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.887226 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxm94\" (UniqueName: \"kubernetes.io/projected/10291551-2baf-4271-bc49-6a40e5ceb94b-kube-api-access-hxm94\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.887240 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9452a17-eda0-4e66-bf4f-30e6e8ac8693-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.887249 4644 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5de2f44b-564a-461c-b9e1-b4b306d8ecb1-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.887718 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.891958 4644 scope.go:117] "RemoveContainer" containerID="18544f7158f117b0d332a235e59e34a9a6a082154d4b31619a3fb49fbd38d7b6" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.894776 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:49:22Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:49:22Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:49:22Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T06:49:22Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.895112 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.895356 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.895602 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.895779 4644 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.895800 4644 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 13 06:49:22 crc kubenswrapper[4644]: W1213 06:49:22.912096 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-4e3c6774158e797903a3df06d8a0d4ad85c13de0515c0d257f8449f4e3c75f3a WatchSource:0}: Error finding container 4e3c6774158e797903a3df06d8a0d4ad85c13de0515c0d257f8449f4e3c75f3a: Status 404 returned error can't find the container with id 4e3c6774158e797903a3df06d8a0d4ad85c13de0515c0d257f8449f4e3c75f3a Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.914310 4644 event.go:368] "Unable to write event (may retry after 
sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 192.168.25.89:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1880b3a224ded61f openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-13 06:49:22.913891871 +0000 UTC m=+225.128842704,LastTimestamp:2025-12-13 06:49:22.913891871 +0000 UTC m=+225.128842704,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.918392 4644 scope.go:117] "RemoveContainer" containerID="1a485eb058cb24ac1ad2c331be8dda06997200219f9f5fc2a597f37cd34d9532" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.930576 4644 scope.go:117] "RemoveContainer" containerID="3a5ec60ffb3914f4bef7e7105fd5b5ad42f867ba66e2f5ec1dbe5b8dcd47f92f" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.930923 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a5ec60ffb3914f4bef7e7105fd5b5ad42f867ba66e2f5ec1dbe5b8dcd47f92f\": container with ID starting with 3a5ec60ffb3914f4bef7e7105fd5b5ad42f867ba66e2f5ec1dbe5b8dcd47f92f not found: ID does not exist" containerID="3a5ec60ffb3914f4bef7e7105fd5b5ad42f867ba66e2f5ec1dbe5b8dcd47f92f" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.930974 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a5ec60ffb3914f4bef7e7105fd5b5ad42f867ba66e2f5ec1dbe5b8dcd47f92f"} err="failed to get container status \"3a5ec60ffb3914f4bef7e7105fd5b5ad42f867ba66e2f5ec1dbe5b8dcd47f92f\": rpc error: code = NotFound desc = could not find container \"3a5ec60ffb3914f4bef7e7105fd5b5ad42f867ba66e2f5ec1dbe5b8dcd47f92f\": container with ID starting with 3a5ec60ffb3914f4bef7e7105fd5b5ad42f867ba66e2f5ec1dbe5b8dcd47f92f not found: ID does not exist" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.931003 4644 scope.go:117] "RemoveContainer" containerID="18544f7158f117b0d332a235e59e34a9a6a082154d4b31619a3fb49fbd38d7b6" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.931300 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18544f7158f117b0d332a235e59e34a9a6a082154d4b31619a3fb49fbd38d7b6\": container with ID starting with 18544f7158f117b0d332a235e59e34a9a6a082154d4b31619a3fb49fbd38d7b6 not found: ID does not exist" containerID="18544f7158f117b0d332a235e59e34a9a6a082154d4b31619a3fb49fbd38d7b6" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.931327 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18544f7158f117b0d332a235e59e34a9a6a082154d4b31619a3fb49fbd38d7b6"} err="failed to get container status \"18544f7158f117b0d332a235e59e34a9a6a082154d4b31619a3fb49fbd38d7b6\": rpc error: code = NotFound desc = could not find container 
\"18544f7158f117b0d332a235e59e34a9a6a082154d4b31619a3fb49fbd38d7b6\": container with ID starting with 18544f7158f117b0d332a235e59e34a9a6a082154d4b31619a3fb49fbd38d7b6 not found: ID does not exist" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.931343 4644 scope.go:117] "RemoveContainer" containerID="1a485eb058cb24ac1ad2c331be8dda06997200219f9f5fc2a597f37cd34d9532" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.931730 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a485eb058cb24ac1ad2c331be8dda06997200219f9f5fc2a597f37cd34d9532\": container with ID starting with 1a485eb058cb24ac1ad2c331be8dda06997200219f9f5fc2a597f37cd34d9532 not found: ID does not exist" containerID="1a485eb058cb24ac1ad2c331be8dda06997200219f9f5fc2a597f37cd34d9532" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.931754 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a485eb058cb24ac1ad2c331be8dda06997200219f9f5fc2a597f37cd34d9532"} err="failed to get container status \"1a485eb058cb24ac1ad2c331be8dda06997200219f9f5fc2a597f37cd34d9532\": rpc error: code = NotFound desc = could not find container \"1a485eb058cb24ac1ad2c331be8dda06997200219f9f5fc2a597f37cd34d9532\": container with ID starting with 1a485eb058cb24ac1ad2c331be8dda06997200219f9f5fc2a597f37cd34d9532 not found: ID does not exist" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.931771 4644 scope.go:117] "RemoveContainer" containerID="15fa61a9e529b0e0eca5efafb4d687d9e4b08ddb595b16a2abdea0c61eff7963" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.946209 4644 scope.go:117] "RemoveContainer" containerID="15fa61a9e529b0e0eca5efafb4d687d9e4b08ddb595b16a2abdea0c61eff7963" Dec 13 06:49:22 crc kubenswrapper[4644]: E1213 06:49:22.946693 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15fa61a9e529b0e0eca5efafb4d687d9e4b08ddb595b16a2abdea0c61eff7963\": container with ID starting with 15fa61a9e529b0e0eca5efafb4d687d9e4b08ddb595b16a2abdea0c61eff7963 not found: ID does not exist" containerID="15fa61a9e529b0e0eca5efafb4d687d9e4b08ddb595b16a2abdea0c61eff7963" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.946730 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15fa61a9e529b0e0eca5efafb4d687d9e4b08ddb595b16a2abdea0c61eff7963"} err="failed to get container status \"15fa61a9e529b0e0eca5efafb4d687d9e4b08ddb595b16a2abdea0c61eff7963\": rpc error: code = NotFound desc = could not find container \"15fa61a9e529b0e0eca5efafb4d687d9e4b08ddb595b16a2abdea0c61eff7963\": container with ID starting with 15fa61a9e529b0e0eca5efafb4d687d9e4b08ddb595b16a2abdea0c61eff7963 not found: ID does not exist" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.966907 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.967073 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.967212 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.967349 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.967526 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.973347 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.973615 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.973791 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.973948 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.974102 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.981825 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" 
pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.982061 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.982326 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.982670 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.982942 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.983209 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.983393 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.983604 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.983874 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:22 crc kubenswrapper[4644]: I1213 06:49:22.984188 4644 status_manager.go:851] "Failed to get 
status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:23 crc kubenswrapper[4644]: E1213 06:49:23.088723 4644 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 13 06:49:23 crc kubenswrapper[4644]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-gb9x7_openshift-marketplace_33f05cad-7917-4d9a-870d-b68d4388bbde_0(1ebd063f7d96e01850c14284a4e498f93bfee42fff5263229a021d164c96ca65): error adding pod openshift-marketplace_marketplace-operator-79b997595-gb9x7 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"1ebd063f7d96e01850c14284a4e498f93bfee42fff5263229a021d164c96ca65" Netns:"/var/run/netns/14114f5e-bc50-4db4-8beb-f130ee2d08fc" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-gb9x7;K8S_POD_INFRA_CONTAINER_ID=1ebd063f7d96e01850c14284a4e498f93bfee42fff5263229a021d164c96ca65;K8S_POD_UID=33f05cad-7917-4d9a-870d-b68d4388bbde" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-gb9x7] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-gb9x7/33f05cad-7917-4d9a-870d-b68d4388bbde]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-gb9x7?timeout=1m0s": dial tcp 192.168.25.89:6443: connect: connection refused Dec 13 06:49:23 crc kubenswrapper[4644]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 13 06:49:23 crc kubenswrapper[4644]: > Dec 13 06:49:23 crc kubenswrapper[4644]: E1213 06:49:23.088989 4644 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 13 06:49:23 crc kubenswrapper[4644]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-gb9x7_openshift-marketplace_33f05cad-7917-4d9a-870d-b68d4388bbde_0(1ebd063f7d96e01850c14284a4e498f93bfee42fff5263229a021d164c96ca65): error adding pod openshift-marketplace_marketplace-operator-79b997595-gb9x7 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"1ebd063f7d96e01850c14284a4e498f93bfee42fff5263229a021d164c96ca65" Netns:"/var/run/netns/14114f5e-bc50-4db4-8beb-f130ee2d08fc" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-gb9x7;K8S_POD_INFRA_CONTAINER_ID=1ebd063f7d96e01850c14284a4e498f93bfee42fff5263229a021d164c96ca65;K8S_POD_UID=33f05cad-7917-4d9a-870d-b68d4388bbde" 
Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-gb9x7] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-gb9x7/33f05cad-7917-4d9a-870d-b68d4388bbde]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-gb9x7?timeout=1m0s": dial tcp 192.168.25.89:6443: connect: connection refused Dec 13 06:49:23 crc kubenswrapper[4644]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 13 06:49:23 crc kubenswrapper[4644]: > pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:23 crc kubenswrapper[4644]: E1213 06:49:23.089013 4644 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Dec 13 06:49:23 crc kubenswrapper[4644]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-gb9x7_openshift-marketplace_33f05cad-7917-4d9a-870d-b68d4388bbde_0(1ebd063f7d96e01850c14284a4e498f93bfee42fff5263229a021d164c96ca65): error adding pod openshift-marketplace_marketplace-operator-79b997595-gb9x7 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"1ebd063f7d96e01850c14284a4e498f93bfee42fff5263229a021d164c96ca65" Netns:"/var/run/netns/14114f5e-bc50-4db4-8beb-f130ee2d08fc" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-gb9x7;K8S_POD_INFRA_CONTAINER_ID=1ebd063f7d96e01850c14284a4e498f93bfee42fff5263229a021d164c96ca65;K8S_POD_UID=33f05cad-7917-4d9a-870d-b68d4388bbde" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-gb9x7] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-gb9x7/33f05cad-7917-4d9a-870d-b68d4388bbde]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-gb9x7?timeout=1m0s": dial tcp 192.168.25.89:6443: connect: connection refused Dec 13 06:49:23 crc kubenswrapper[4644]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 13 06:49:23 crc kubenswrapper[4644]: > pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:23 crc kubenswrapper[4644]: 
E1213 06:49:23.089068 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"marketplace-operator-79b997595-gb9x7_openshift-marketplace(33f05cad-7917-4d9a-870d-b68d4388bbde)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"marketplace-operator-79b997595-gb9x7_openshift-marketplace(33f05cad-7917-4d9a-870d-b68d4388bbde)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-gb9x7_openshift-marketplace_33f05cad-7917-4d9a-870d-b68d4388bbde_0(1ebd063f7d96e01850c14284a4e498f93bfee42fff5263229a021d164c96ca65): error adding pod openshift-marketplace_marketplace-operator-79b997595-gb9x7 to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"1ebd063f7d96e01850c14284a4e498f93bfee42fff5263229a021d164c96ca65\\\" Netns:\\\"/var/run/netns/14114f5e-bc50-4db4-8beb-f130ee2d08fc\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-gb9x7;K8S_POD_INFRA_CONTAINER_ID=1ebd063f7d96e01850c14284a4e498f93bfee42fff5263229a021d164c96ca65;K8S_POD_UID=33f05cad-7917-4d9a-870d-b68d4388bbde\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-gb9x7] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-gb9x7/33f05cad-7917-4d9a-870d-b68d4388bbde]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: status update failed for pod /: Get \\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-gb9x7?timeout=1m0s\\\": dial tcp 192.168.25.89:6443: connect: connection refused\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" podUID="33f05cad-7917-4d9a-870d-b68d4388bbde" Dec 13 06:49:23 crc kubenswrapper[4644]: E1213 06:49:23.277888 4644 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="800ms" Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.676336 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"a167f0d33bb69e726768ada9b577a95f3724b31298f56775c70cf1ad36f08b86"} Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.676409 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" 
event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"4e3c6774158e797903a3df06d8a0d4ad85c13de0515c0d257f8449f4e3c75f3a"} Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.677241 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:23 crc kubenswrapper[4644]: E1213 06:49:23.677168 4644 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 192.168.25.89:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.677623 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.678135 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.678615 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.679040 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.681723 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.684753 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.686968 4644 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7" exitCode=0 Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.687096 4644 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638" exitCode=0 Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.687111 4644 
generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2" exitCode=0 Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.687121 4644 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334" exitCode=2 Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.687200 4644 scope.go:117] "RemoveContainer" containerID="b62cc6c979f2e8bce394ea34b6400a6c68b8e60330781e54f37e17ac6e72e1dc" Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.703508 4644 generic.go:334] "Generic (PLEG): container finished" podID="9beaac24-f4b1-461d-94d0-d798a89ce9a0" containerID="015945f1ae0449376ffcb272bc0933713ea96691f0a2d84f9dff66a8f012d957" exitCode=0 Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.703573 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"9beaac24-f4b1-461d-94d0-d798a89ce9a0","Type":"ContainerDied","Data":"015945f1ae0449376ffcb272bc0933713ea96691f0a2d84f9dff66a8f012d957"} Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.705096 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.705542 4644 status_manager.go:851] "Failed to get status for pod" podUID="9beaac24-f4b1-461d-94d0-d798a89ce9a0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.705885 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.706380 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.706434 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.706957 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.707030 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:23 crc kubenswrapper[4644]: I1213 06:49:23.707435 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:24 crc kubenswrapper[4644]: E1213 06:49:24.079365 4644 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="1.6s" Dec 13 06:49:24 crc kubenswrapper[4644]: E1213 06:49:24.302529 4644 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 13 06:49:24 crc kubenswrapper[4644]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-gb9x7_openshift-marketplace_33f05cad-7917-4d9a-870d-b68d4388bbde_0(c01f5d9ecb7dbba313b7e2710f411fefc67dfcf00b9e4cab38f1782dfd21130b): error adding pod openshift-marketplace_marketplace-operator-79b997595-gb9x7 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"c01f5d9ecb7dbba313b7e2710f411fefc67dfcf00b9e4cab38f1782dfd21130b" Netns:"/var/run/netns/db721c8a-a2eb-4a94-9cb1-e9f9513865b4" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-gb9x7;K8S_POD_INFRA_CONTAINER_ID=c01f5d9ecb7dbba313b7e2710f411fefc67dfcf00b9e4cab38f1782dfd21130b;K8S_POD_UID=33f05cad-7917-4d9a-870d-b68d4388bbde" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-gb9x7] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-gb9x7/33f05cad-7917-4d9a-870d-b68d4388bbde]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-gb9x7?timeout=1m0s": dial tcp 192.168.25.89:6443: connect: connection refused Dec 13 06:49:24 crc kubenswrapper[4644]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 13 06:49:24 crc kubenswrapper[4644]: > Dec 13 06:49:24 crc kubenswrapper[4644]: E1213 06:49:24.302639 4644 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 13 06:49:24 crc kubenswrapper[4644]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-gb9x7_openshift-marketplace_33f05cad-7917-4d9a-870d-b68d4388bbde_0(c01f5d9ecb7dbba313b7e2710f411fefc67dfcf00b9e4cab38f1782dfd21130b): error adding pod openshift-marketplace_marketplace-operator-79b997595-gb9x7 to CNI network 
"multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"c01f5d9ecb7dbba313b7e2710f411fefc67dfcf00b9e4cab38f1782dfd21130b" Netns:"/var/run/netns/db721c8a-a2eb-4a94-9cb1-e9f9513865b4" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-gb9x7;K8S_POD_INFRA_CONTAINER_ID=c01f5d9ecb7dbba313b7e2710f411fefc67dfcf00b9e4cab38f1782dfd21130b;K8S_POD_UID=33f05cad-7917-4d9a-870d-b68d4388bbde" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-gb9x7] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-gb9x7/33f05cad-7917-4d9a-870d-b68d4388bbde]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-gb9x7?timeout=1m0s": dial tcp 192.168.25.89:6443: connect: connection refused Dec 13 06:49:24 crc kubenswrapper[4644]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 13 06:49:24 crc kubenswrapper[4644]: > pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:24 crc kubenswrapper[4644]: E1213 06:49:24.302662 4644 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Dec 13 06:49:24 crc kubenswrapper[4644]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-gb9x7_openshift-marketplace_33f05cad-7917-4d9a-870d-b68d4388bbde_0(c01f5d9ecb7dbba313b7e2710f411fefc67dfcf00b9e4cab38f1782dfd21130b): error adding pod openshift-marketplace_marketplace-operator-79b997595-gb9x7 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"c01f5d9ecb7dbba313b7e2710f411fefc67dfcf00b9e4cab38f1782dfd21130b" Netns:"/var/run/netns/db721c8a-a2eb-4a94-9cb1-e9f9513865b4" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-gb9x7;K8S_POD_INFRA_CONTAINER_ID=c01f5d9ecb7dbba313b7e2710f411fefc67dfcf00b9e4cab38f1782dfd21130b;K8S_POD_UID=33f05cad-7917-4d9a-870d-b68d4388bbde" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-gb9x7] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-gb9x7/33f05cad-7917-4d9a-870d-b68d4388bbde]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-gb9x7?timeout=1m0s": dial tcp 192.168.25.89:6443: connect: connection refused Dec 13 06:49:24 crc 
kubenswrapper[4644]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 13 06:49:24 crc kubenswrapper[4644]: > pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:24 crc kubenswrapper[4644]: E1213 06:49:24.302735 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"marketplace-operator-79b997595-gb9x7_openshift-marketplace(33f05cad-7917-4d9a-870d-b68d4388bbde)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"marketplace-operator-79b997595-gb9x7_openshift-marketplace(33f05cad-7917-4d9a-870d-b68d4388bbde)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-gb9x7_openshift-marketplace_33f05cad-7917-4d9a-870d-b68d4388bbde_0(c01f5d9ecb7dbba313b7e2710f411fefc67dfcf00b9e4cab38f1782dfd21130b): error adding pod openshift-marketplace_marketplace-operator-79b997595-gb9x7 to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"c01f5d9ecb7dbba313b7e2710f411fefc67dfcf00b9e4cab38f1782dfd21130b\\\" Netns:\\\"/var/run/netns/db721c8a-a2eb-4a94-9cb1-e9f9513865b4\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-gb9x7;K8S_POD_INFRA_CONTAINER_ID=c01f5d9ecb7dbba313b7e2710f411fefc67dfcf00b9e4cab38f1782dfd21130b;K8S_POD_UID=33f05cad-7917-4d9a-870d-b68d4388bbde\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-gb9x7] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-gb9x7/33f05cad-7917-4d9a-870d-b68d4388bbde]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: status update failed for pod /: Get \\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-gb9x7?timeout=1m0s\\\": dial tcp 192.168.25.89:6443: connect: connection refused\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" podUID="33f05cad-7917-4d9a-870d-b68d4388bbde" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.717095 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.911146 4644 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.912015 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.913094 4644 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.913763 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.914115 4644 status_manager.go:851] "Failed to get status for pod" podUID="9beaac24-f4b1-461d-94d0-d798a89ce9a0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.914534 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.914814 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.915119 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.915391 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.979122 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.979726 4644 status_manager.go:851] "Failed to get status for pod" podUID="9beaac24-f4b1-461d-94d0-d798a89ce9a0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.980016 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.980380 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.980730 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.981127 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.981388 4644 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:24 crc kubenswrapper[4644]: I1213 06:49:24.981651 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.016076 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.016148 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 13 06:49:25 crc 
kubenswrapper[4644]: I1213 06:49:25.016224 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.016242 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.016311 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.016464 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.016820 4644 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.016843 4644 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.016854 4644 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.117566 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9beaac24-f4b1-461d-94d0-d798a89ce9a0-kubelet-dir\") pod \"9beaac24-f4b1-461d-94d0-d798a89ce9a0\" (UID: \"9beaac24-f4b1-461d-94d0-d798a89ce9a0\") " Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.117673 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/9beaac24-f4b1-461d-94d0-d798a89ce9a0-var-lock\") pod \"9beaac24-f4b1-461d-94d0-d798a89ce9a0\" (UID: \"9beaac24-f4b1-461d-94d0-d798a89ce9a0\") " Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.117743 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9beaac24-f4b1-461d-94d0-d798a89ce9a0-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "9beaac24-f4b1-461d-94d0-d798a89ce9a0" (UID: "9beaac24-f4b1-461d-94d0-d798a89ce9a0"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.117771 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9beaac24-f4b1-461d-94d0-d798a89ce9a0-kube-api-access\") pod \"9beaac24-f4b1-461d-94d0-d798a89ce9a0\" (UID: \"9beaac24-f4b1-461d-94d0-d798a89ce9a0\") " Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.117849 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9beaac24-f4b1-461d-94d0-d798a89ce9a0-var-lock" (OuterVolumeSpecName: "var-lock") pod "9beaac24-f4b1-461d-94d0-d798a89ce9a0" (UID: "9beaac24-f4b1-461d-94d0-d798a89ce9a0"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.118330 4644 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9beaac24-f4b1-461d-94d0-d798a89ce9a0-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.118346 4644 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/9beaac24-f4b1-461d-94d0-d798a89ce9a0-var-lock\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.122330 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9beaac24-f4b1-461d-94d0-d798a89ce9a0-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "9beaac24-f4b1-461d-94d0-d798a89ce9a0" (UID: "9beaac24-f4b1-461d-94d0-d798a89ce9a0"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.219731 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9beaac24-f4b1-461d-94d0-d798a89ce9a0-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 06:49:25 crc kubenswrapper[4644]: E1213 06:49:25.680950 4644 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="3.2s" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.725005 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.724997 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"9beaac24-f4b1-461d-94d0-d798a89ce9a0","Type":"ContainerDied","Data":"b36d4c3b31f316536b9fad737bdeee73e595f5562e0b53ce85c7c44c9963fb20"} Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.725120 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b36d4c3b31f316536b9fad737bdeee73e595f5562e0b53ce85c7c44c9963fb20" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.731470 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.732558 4644 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844" exitCode=0 Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.732630 4644 scope.go:117] "RemoveContainer" containerID="e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.732661 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.747870 4644 status_manager.go:851] "Failed to get status for pod" podUID="9beaac24-f4b1-461d-94d0-d798a89ce9a0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.748790 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.749144 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.749483 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.749736 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.750076 4644 status_manager.go:851] 
"Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.750339 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.750679 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.751018 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.751841 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.752159 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.752422 4644 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.752694 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.752942 4644 status_manager.go:851] "Failed to get status for pod" podUID="9beaac24-f4b1-461d-94d0-d798a89ce9a0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.754651 4644 scope.go:117] 
"RemoveContainer" containerID="d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.765881 4644 scope.go:117] "RemoveContainer" containerID="76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.784267 4644 scope.go:117] "RemoveContainer" containerID="2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.797332 4644 scope.go:117] "RemoveContainer" containerID="0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.809826 4644 scope.go:117] "RemoveContainer" containerID="fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.826478 4644 scope.go:117] "RemoveContainer" containerID="e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7" Dec 13 06:49:25 crc kubenswrapper[4644]: E1213 06:49:25.826860 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\": container with ID starting with e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7 not found: ID does not exist" containerID="e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.826912 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7"} err="failed to get container status \"e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\": rpc error: code = NotFound desc = could not find container \"e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7\": container with ID starting with e91cd9dfe6406f4a98799cb9ca0847bf11220ce62c700b8e149f111cbc167ce7 not found: ID does not exist" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.826949 4644 scope.go:117] "RemoveContainer" containerID="d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638" Dec 13 06:49:25 crc kubenswrapper[4644]: E1213 06:49:25.827421 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\": container with ID starting with d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638 not found: ID does not exist" containerID="d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.828327 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638"} err="failed to get container status \"d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\": rpc error: code = NotFound desc = could not find container \"d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638\": container with ID starting with d8dec7a66f98bfbb077e6046aa48050c81ebe8a54897c726fe012ddd3667c638 not found: ID does not exist" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.828372 4644 scope.go:117] "RemoveContainer" containerID="76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2" Dec 13 06:49:25 crc kubenswrapper[4644]: E1213 06:49:25.829148 4644 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\": container with ID starting with 76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2 not found: ID does not exist" containerID="76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.829254 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2"} err="failed to get container status \"76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\": rpc error: code = NotFound desc = could not find container \"76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2\": container with ID starting with 76ed9d9677a61fc587cb1dae11c6be4e2ed2b77b226fecbfaebd05b2ea013ab2 not found: ID does not exist" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.829287 4644 scope.go:117] "RemoveContainer" containerID="2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334" Dec 13 06:49:25 crc kubenswrapper[4644]: E1213 06:49:25.829800 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\": container with ID starting with 2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334 not found: ID does not exist" containerID="2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.829846 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334"} err="failed to get container status \"2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\": rpc error: code = NotFound desc = could not find container \"2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334\": container with ID starting with 2ef5581432aaacb1d1b3a1b5eb9f8801b1cc3d95f449d0a0eddc1432c14e8334 not found: ID does not exist" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.829869 4644 scope.go:117] "RemoveContainer" containerID="0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844" Dec 13 06:49:25 crc kubenswrapper[4644]: E1213 06:49:25.830128 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\": container with ID starting with 0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844 not found: ID does not exist" containerID="0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.830158 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844"} err="failed to get container status \"0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\": rpc error: code = NotFound desc = could not find container \"0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844\": container with ID starting with 0d2d0f534c3b7b35bc0cf809a19fcab7c8843b0cf11530957aab2157745de844 not found: ID does not exist" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.830179 4644 scope.go:117] "RemoveContainer" 
containerID="fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68" Dec 13 06:49:25 crc kubenswrapper[4644]: E1213 06:49:25.830528 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\": container with ID starting with fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68 not found: ID does not exist" containerID="fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68" Dec 13 06:49:25 crc kubenswrapper[4644]: I1213 06:49:25.830548 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68"} err="failed to get container status \"fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\": rpc error: code = NotFound desc = could not find container \"fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68\": container with ID starting with fe33a1b0394f066244c5a960ecd4289ef46536293526775a7db42840f9cd7e68 not found: ID does not exist" Dec 13 06:49:26 crc kubenswrapper[4644]: I1213 06:49:26.396076 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 13 06:49:26 crc kubenswrapper[4644]: E1213 06:49:26.925419 4644 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 192.168.25.89:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1880b3a224ded61f openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-13 06:49:22.913891871 +0000 UTC m=+225.128842704,LastTimestamp:2025-12-13 06:49:22.913891871 +0000 UTC m=+225.128842704,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 13 06:49:28 crc kubenswrapper[4644]: I1213 06:49:28.391388 4644 status_manager.go:851] "Failed to get status for pod" podUID="9beaac24-f4b1-461d-94d0-d798a89ce9a0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:28 crc kubenswrapper[4644]: I1213 06:49:28.391641 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:28 crc kubenswrapper[4644]: I1213 06:49:28.391904 4644 status_manager.go:851] "Failed to get status for pod" 
podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:28 crc kubenswrapper[4644]: I1213 06:49:28.392252 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:28 crc kubenswrapper[4644]: I1213 06:49:28.392552 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:28 crc kubenswrapper[4644]: I1213 06:49:28.392891 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:28 crc kubenswrapper[4644]: E1213 06:49:28.882520 4644 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="6.4s" Dec 13 06:49:29 crc kubenswrapper[4644]: E1213 06:49:29.921658 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1e3eddf5_6aec_4c15_8ae7_a7258d60be4e.slice/crio-278812383a31cbe8c7aebda128041970bc1f9109f277592a1b1416fd27928d5e\": RecentStats: unable to find data in memory cache]" Dec 13 06:49:31 crc kubenswrapper[4644]: E1213 06:49:31.408340 4644 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 192.168.25.89:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" volumeName="registry-storage" Dec 13 06:49:35 crc kubenswrapper[4644]: E1213 06:49:35.283562 4644 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.89:6443: connect: connection refused" interval="7s" Dec 13 06:49:35 crc kubenswrapper[4644]: I1213 06:49:35.388401 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:35 crc kubenswrapper[4644]: I1213 06:49:35.388937 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:35 crc kubenswrapper[4644]: I1213 06:49:35.777279 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 13 06:49:35 crc kubenswrapper[4644]: I1213 06:49:35.777592 4644 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe" exitCode=1 Dec 13 06:49:35 crc kubenswrapper[4644]: I1213 06:49:35.777627 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe"} Dec 13 06:49:35 crc kubenswrapper[4644]: I1213 06:49:35.778058 4644 scope.go:117] "RemoveContainer" containerID="8a9dc2872654213bf0f3d873eaf2c6a436fdf222b3a867970aac8afc28e8a9fe" Dec 13 06:49:35 crc kubenswrapper[4644]: I1213 06:49:35.778428 4644 status_manager.go:851] "Failed to get status for pod" podUID="9beaac24-f4b1-461d-94d0-d798a89ce9a0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:35 crc kubenswrapper[4644]: I1213 06:49:35.778735 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:35 crc kubenswrapper[4644]: I1213 06:49:35.779067 4644 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:35 crc kubenswrapper[4644]: I1213 06:49:35.779523 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:35 crc kubenswrapper[4644]: I1213 06:49:35.779891 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:35 crc kubenswrapper[4644]: I1213 06:49:35.780325 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:35 crc 
kubenswrapper[4644]: I1213 06:49:35.780606 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:35 crc kubenswrapper[4644]: E1213 06:49:35.945809 4644 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 13 06:49:35 crc kubenswrapper[4644]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-gb9x7_openshift-marketplace_33f05cad-7917-4d9a-870d-b68d4388bbde_0(f8414840bf3ac17b6c0e59daa37db1ca88ea1f12cd42465826d5a0fe40d9c75e): error adding pod openshift-marketplace_marketplace-operator-79b997595-gb9x7 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"f8414840bf3ac17b6c0e59daa37db1ca88ea1f12cd42465826d5a0fe40d9c75e" Netns:"/var/run/netns/8557a01a-ed5f-40a5-b2f2-dc726a51e9e3" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-gb9x7;K8S_POD_INFRA_CONTAINER_ID=f8414840bf3ac17b6c0e59daa37db1ca88ea1f12cd42465826d5a0fe40d9c75e;K8S_POD_UID=33f05cad-7917-4d9a-870d-b68d4388bbde" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-gb9x7] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-gb9x7/33f05cad-7917-4d9a-870d-b68d4388bbde]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-gb9x7?timeout=1m0s": dial tcp 192.168.25.89:6443: connect: connection refused Dec 13 06:49:35 crc kubenswrapper[4644]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 13 06:49:35 crc kubenswrapper[4644]: > Dec 13 06:49:35 crc kubenswrapper[4644]: E1213 06:49:35.945904 4644 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 13 06:49:35 crc kubenswrapper[4644]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-gb9x7_openshift-marketplace_33f05cad-7917-4d9a-870d-b68d4388bbde_0(f8414840bf3ac17b6c0e59daa37db1ca88ea1f12cd42465826d5a0fe40d9c75e): error adding pod openshift-marketplace_marketplace-operator-79b997595-gb9x7 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"f8414840bf3ac17b6c0e59daa37db1ca88ea1f12cd42465826d5a0fe40d9c75e" Netns:"/var/run/netns/8557a01a-ed5f-40a5-b2f2-dc726a51e9e3" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-gb9x7;K8S_POD_INFRA_CONTAINER_ID=f8414840bf3ac17b6c0e59daa37db1ca88ea1f12cd42465826d5a0fe40d9c75e;K8S_POD_UID=33f05cad-7917-4d9a-870d-b68d4388bbde" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-gb9x7] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-gb9x7/33f05cad-7917-4d9a-870d-b68d4388bbde]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-gb9x7?timeout=1m0s": dial tcp 192.168.25.89:6443: connect: connection refused Dec 13 06:49:35 crc kubenswrapper[4644]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 13 06:49:35 crc kubenswrapper[4644]: > pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:35 crc kubenswrapper[4644]: E1213 06:49:35.945929 4644 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Dec 13 06:49:35 crc kubenswrapper[4644]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-gb9x7_openshift-marketplace_33f05cad-7917-4d9a-870d-b68d4388bbde_0(f8414840bf3ac17b6c0e59daa37db1ca88ea1f12cd42465826d5a0fe40d9c75e): error adding pod openshift-marketplace_marketplace-operator-79b997595-gb9x7 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"f8414840bf3ac17b6c0e59daa37db1ca88ea1f12cd42465826d5a0fe40d9c75e" Netns:"/var/run/netns/8557a01a-ed5f-40a5-b2f2-dc726a51e9e3" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-gb9x7;K8S_POD_INFRA_CONTAINER_ID=f8414840bf3ac17b6c0e59daa37db1ca88ea1f12cd42465826d5a0fe40d9c75e;K8S_POD_UID=33f05cad-7917-4d9a-870d-b68d4388bbde" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-gb9x7] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-gb9x7/33f05cad-7917-4d9a-870d-b68d4388bbde]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-gb9x7?timeout=1m0s": dial tcp 192.168.25.89:6443: connect: connection refused Dec 13 06:49:35 crc kubenswrapper[4644]: ': StdinData: 
{"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 13 06:49:35 crc kubenswrapper[4644]: > pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:35 crc kubenswrapper[4644]: E1213 06:49:35.946000 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"marketplace-operator-79b997595-gb9x7_openshift-marketplace(33f05cad-7917-4d9a-870d-b68d4388bbde)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"marketplace-operator-79b997595-gb9x7_openshift-marketplace(33f05cad-7917-4d9a-870d-b68d4388bbde)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-gb9x7_openshift-marketplace_33f05cad-7917-4d9a-870d-b68d4388bbde_0(f8414840bf3ac17b6c0e59daa37db1ca88ea1f12cd42465826d5a0fe40d9c75e): error adding pod openshift-marketplace_marketplace-operator-79b997595-gb9x7 to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"f8414840bf3ac17b6c0e59daa37db1ca88ea1f12cd42465826d5a0fe40d9c75e\\\" Netns:\\\"/var/run/netns/8557a01a-ed5f-40a5-b2f2-dc726a51e9e3\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-gb9x7;K8S_POD_INFRA_CONTAINER_ID=f8414840bf3ac17b6c0e59daa37db1ca88ea1f12cd42465826d5a0fe40d9c75e;K8S_POD_UID=33f05cad-7917-4d9a-870d-b68d4388bbde\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-gb9x7] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-gb9x7/33f05cad-7917-4d9a-870d-b68d4388bbde]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-gb9x7 in out of cluster comm: status update failed for pod /: Get \\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-gb9x7?timeout=1m0s\\\": dial tcp 192.168.25.89:6443: connect: connection refused\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" podUID="33f05cad-7917-4d9a-870d-b68d4388bbde" Dec 13 06:49:36 crc kubenswrapper[4644]: I1213 06:49:36.785397 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 13 06:49:36 crc kubenswrapper[4644]: I1213 06:49:36.785753 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ed2665439d787209ac34f60063db1045623b37ea1f81e6576e18668ed0b473b6"} Dec 13 06:49:36 crc kubenswrapper[4644]: I1213 06:49:36.786557 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:36 crc kubenswrapper[4644]: I1213 06:49:36.786963 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:36 crc kubenswrapper[4644]: I1213 06:49:36.787338 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:36 crc kubenswrapper[4644]: I1213 06:49:36.787844 4644 status_manager.go:851] "Failed to get status for pod" podUID="9beaac24-f4b1-461d-94d0-d798a89ce9a0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:36 crc kubenswrapper[4644]: I1213 06:49:36.788144 4644 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:36 crc kubenswrapper[4644]: I1213 06:49:36.788390 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:36 crc kubenswrapper[4644]: I1213 06:49:36.788677 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:36 crc kubenswrapper[4644]: E1213 06:49:36.926787 4644 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 192.168.25.89:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1880b3a224ded61f openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-13 06:49:22.913891871 +0000 UTC m=+225.128842704,LastTimestamp:2025-12-13 06:49:22.913891871 +0000 UTC m=+225.128842704,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.388252 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.388823 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.389051 4644 status_manager.go:851] "Failed to get status for pod" podUID="9beaac24-f4b1-461d-94d0-d798a89ce9a0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.389307 4644 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.389602 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.389972 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.390270 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.390537 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" 
pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.397786 4644 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a392571-7fa5-4852-8367-c4c35c10b2e3" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.397807 4644 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a392571-7fa5-4852-8367-c4c35c10b2e3" Dec 13 06:49:37 crc kubenswrapper[4644]: E1213 06:49:37.398066 4644 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.398545 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:37 crc kubenswrapper[4644]: W1213 06:49:37.411032 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-225e3ef97c651d0637883a0a9d9b40edf7a5dc308733b3a0182f827225a2319e WatchSource:0}: Error finding container 225e3ef97c651d0637883a0a9d9b40edf7a5dc308733b3a0182f827225a2319e: Status 404 returned error can't find the container with id 225e3ef97c651d0637883a0a9d9b40edf7a5dc308733b3a0182f827225a2319e Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.791164 4644 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="edcc965a5264da6244ba686e69ccba6be817194eb1487289f22720a8e0cb1f44" exitCode=0 Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.791252 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"edcc965a5264da6244ba686e69ccba6be817194eb1487289f22720a8e0cb1f44"} Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.791399 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"225e3ef97c651d0637883a0a9d9b40edf7a5dc308733b3a0182f827225a2319e"} Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.791687 4644 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a392571-7fa5-4852-8367-c4c35c10b2e3" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.791702 4644 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a392571-7fa5-4852-8367-c4c35c10b2e3" Dec 13 06:49:37 crc kubenswrapper[4644]: E1213 06:49:37.792198 4644 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.792237 4644 status_manager.go:851] "Failed to get status for pod" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" 
pod="openshift-marketplace/redhat-operators-gqhfh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-gqhfh\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.792671 4644 status_manager.go:851] "Failed to get status for pod" podUID="9beaac24-f4b1-461d-94d0-d798a89ce9a0" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.792936 4644 status_manager.go:851] "Failed to get status for pod" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" pod="openshift-marketplace/marketplace-operator-79b997595-crmqj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-crmqj\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.793229 4644 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.793506 4644 status_manager.go:851] "Failed to get status for pod" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" pod="openshift-marketplace/community-operators-2bmvc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2bmvc\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.793700 4644 status_manager.go:851] "Failed to get status for pod" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" pod="openshift-marketplace/certified-operators-54mm5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-54mm5\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:37 crc kubenswrapper[4644]: I1213 06:49:37.793929 4644 status_manager.go:851] "Failed to get status for pod" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" pod="openshift-marketplace/redhat-marketplace-pngbm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pngbm\": dial tcp 192.168.25.89:6443: connect: connection refused" Dec 13 06:49:38 crc kubenswrapper[4644]: I1213 06:49:38.798654 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"57c9b931671b5f9b80f94e6b0088e601a7c5c77f313018de2850f5cbd7b23d44"} Dec 13 06:49:38 crc kubenswrapper[4644]: I1213 06:49:38.798696 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"af0e3c38399432dbcc4947c571c82045bd5fb0714eceb41999cc33d6159b5dfe"} Dec 13 06:49:38 crc kubenswrapper[4644]: I1213 06:49:38.798706 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d65e0616e345f1dcedaf32ef1f229787cfb1073a159dd6d84f0168914b88c9c3"} Dec 13 06:49:38 crc kubenswrapper[4644]: I1213 06:49:38.798714 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"18dca2a5f6e707c33ef701668f7e60af31e45021e7e0463e3b0a84d97f161d6e"} Dec 13 06:49:38 crc kubenswrapper[4644]: I1213 06:49:38.798722 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"7308a3b1349631a05c3cd6ba3609ed04895864405305a249d391ab07418ae983"} Dec 13 06:49:38 crc kubenswrapper[4644]: I1213 06:49:38.798859 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:38 crc kubenswrapper[4644]: I1213 06:49:38.798931 4644 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a392571-7fa5-4852-8367-c4c35c10b2e3" Dec 13 06:49:38 crc kubenswrapper[4644]: I1213 06:49:38.798945 4644 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a392571-7fa5-4852-8367-c4c35c10b2e3" Dec 13 06:49:41 crc kubenswrapper[4644]: I1213 06:49:41.637758 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 06:49:42 crc kubenswrapper[4644]: I1213 06:49:42.399480 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:42 crc kubenswrapper[4644]: I1213 06:49:42.399526 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:42 crc kubenswrapper[4644]: I1213 06:49:42.404731 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:43 crc kubenswrapper[4644]: I1213 06:49:43.923257 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 06:49:43 crc kubenswrapper[4644]: I1213 06:49:43.926274 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 06:49:44 crc kubenswrapper[4644]: I1213 06:49:44.369337 4644 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:49:44 crc kubenswrapper[4644]: I1213 06:49:44.392069 4644 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a392571-7fa5-4852-8367-c4c35c10b2e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:49:37Z\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:49:37Z\\\",\\\"message\\\":\\\"containers 
with unready status: [kube-apiserver kube-apiserver-cert-syncer kube-apiserver-cert-regeneration-controller kube-apiserver-insecure-readyz kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T06:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-cert-syncer kube-apiserver-cert-regeneration-controller kube-apiserver-insecure-readyz kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}}}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://edcc965a5264da6244ba686e69cc
ba6be817194eb1487289f22720a8e0cb1f44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://edcc965a5264da6244ba686e69ccba6be817194eb1487289f22720a8e0cb1f44\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T06:49:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T06:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Pending\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Pod \"kube-apiserver-crc\" is invalid: metadata.uid: Invalid value: \"1a392571-7fa5-4852-8367-c4c35c10b2e3\": field is immutable" Dec 13 06:49:44 crc kubenswrapper[4644]: I1213 06:49:44.515460 4644 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="def85d88-7da1-4069-b258-d2ccbf9f138f" Dec 13 06:49:44 crc kubenswrapper[4644]: I1213 06:49:44.819123 4644 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a392571-7fa5-4852-8367-c4c35c10b2e3" Dec 13 06:49:44 crc kubenswrapper[4644]: I1213 06:49:44.819157 4644 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="1a392571-7fa5-4852-8367-c4c35c10b2e3" Dec 13 06:49:44 crc kubenswrapper[4644]: I1213 06:49:44.822513 4644 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="def85d88-7da1-4069-b258-d2ccbf9f138f" Dec 13 06:49:50 crc kubenswrapper[4644]: I1213 06:49:50.388836 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:50 crc kubenswrapper[4644]: I1213 06:49:50.389395 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:50 crc kubenswrapper[4644]: I1213 06:49:50.843477 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" event={"ID":"33f05cad-7917-4d9a-870d-b68d4388bbde","Type":"ContainerStarted","Data":"7904a3d08bbb017a4eb3a23c8f08da31efc4e8f90d24569f4e900298ea8f8844"} Dec 13 06:49:50 crc kubenswrapper[4644]: I1213 06:49:50.843750 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" event={"ID":"33f05cad-7917-4d9a-870d-b68d4388bbde","Type":"ContainerStarted","Data":"531756221b7adeda4be7ee41a8637f6174d4c2a4c1012401bb16d67b6cd32db9"} Dec 13 06:49:50 crc kubenswrapper[4644]: I1213 06:49:50.844116 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:50 crc kubenswrapper[4644]: I1213 06:49:50.845781 4644 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-gb9x7 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.64:8080/healthz\": dial tcp 10.217.0.64:8080: connect: connection refused" start-of-body= Dec 13 06:49:50 crc kubenswrapper[4644]: I1213 06:49:50.845833 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" podUID="33f05cad-7917-4d9a-870d-b68d4388bbde" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.64:8080/healthz\": dial tcp 10.217.0.64:8080: connect: connection refused" Dec 13 06:49:51 crc kubenswrapper[4644]: I1213 06:49:51.641373 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 06:49:51 crc kubenswrapper[4644]: I1213 06:49:51.849816 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-gb9x7_33f05cad-7917-4d9a-870d-b68d4388bbde/marketplace-operator/0.log" Dec 13 06:49:51 crc kubenswrapper[4644]: I1213 06:49:51.850028 4644 generic.go:334] "Generic (PLEG): container finished" podID="33f05cad-7917-4d9a-870d-b68d4388bbde" containerID="7904a3d08bbb017a4eb3a23c8f08da31efc4e8f90d24569f4e900298ea8f8844" exitCode=1 Dec 13 06:49:51 crc kubenswrapper[4644]: I1213 06:49:51.850057 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" event={"ID":"33f05cad-7917-4d9a-870d-b68d4388bbde","Type":"ContainerDied","Data":"7904a3d08bbb017a4eb3a23c8f08da31efc4e8f90d24569f4e900298ea8f8844"} Dec 13 06:49:51 crc kubenswrapper[4644]: I1213 06:49:51.850765 4644 scope.go:117] "RemoveContainer" containerID="7904a3d08bbb017a4eb3a23c8f08da31efc4e8f90d24569f4e900298ea8f8844" Dec 13 06:49:52 crc kubenswrapper[4644]: I1213 06:49:52.332541 4644 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:49:52 crc kubenswrapper[4644]: I1213 06:49:52.857663 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-gb9x7_33f05cad-7917-4d9a-870d-b68d4388bbde/marketplace-operator/1.log" Dec 13 06:49:52 crc kubenswrapper[4644]: I1213 06:49:52.858224 4644 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-gb9x7_33f05cad-7917-4d9a-870d-b68d4388bbde/marketplace-operator/0.log" Dec 13 06:49:52 crc kubenswrapper[4644]: I1213 06:49:52.858276 4644 generic.go:334] "Generic (PLEG): container finished" podID="33f05cad-7917-4d9a-870d-b68d4388bbde" containerID="41d9f431a026e16bcd1a0261f98a3d8767123286e6f8ee303dfbe8fd087f7a1c" exitCode=1 Dec 13 06:49:52 crc kubenswrapper[4644]: I1213 06:49:52.858309 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" event={"ID":"33f05cad-7917-4d9a-870d-b68d4388bbde","Type":"ContainerDied","Data":"41d9f431a026e16bcd1a0261f98a3d8767123286e6f8ee303dfbe8fd087f7a1c"} Dec 13 06:49:52 crc kubenswrapper[4644]: I1213 06:49:52.858349 4644 scope.go:117] "RemoveContainer" containerID="7904a3d08bbb017a4eb3a23c8f08da31efc4e8f90d24569f4e900298ea8f8844" Dec 13 06:49:52 crc kubenswrapper[4644]: I1213 06:49:52.858918 4644 scope.go:117] "RemoveContainer" containerID="41d9f431a026e16bcd1a0261f98a3d8767123286e6f8ee303dfbe8fd087f7a1c" Dec 13 06:49:52 crc kubenswrapper[4644]: E1213 06:49:52.859137 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-gb9x7_openshift-marketplace(33f05cad-7917-4d9a-870d-b68d4388bbde)\"" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" podUID="33f05cad-7917-4d9a-870d-b68d4388bbde" Dec 13 06:49:53 crc kubenswrapper[4644]: I1213 06:49:53.494208 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 13 06:49:53 crc kubenswrapper[4644]: I1213 06:49:53.557301 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 13 06:49:53 crc kubenswrapper[4644]: I1213 06:49:53.862815 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-gb9x7_33f05cad-7917-4d9a-870d-b68d4388bbde/marketplace-operator/1.log" Dec 13 06:49:53 crc kubenswrapper[4644]: I1213 06:49:53.863191 4644 scope.go:117] "RemoveContainer" containerID="41d9f431a026e16bcd1a0261f98a3d8767123286e6f8ee303dfbe8fd087f7a1c" Dec 13 06:49:53 crc kubenswrapper[4644]: E1213 06:49:53.863360 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-gb9x7_openshift-marketplace(33f05cad-7917-4d9a-870d-b68d4388bbde)\"" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" podUID="33f05cad-7917-4d9a-870d-b68d4388bbde" Dec 13 06:49:53 crc kubenswrapper[4644]: I1213 06:49:53.880043 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 13 06:49:54 crc kubenswrapper[4644]: I1213 06:49:54.509520 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 13 06:49:54 crc kubenswrapper[4644]: I1213 06:49:54.858823 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 13 06:49:54 crc kubenswrapper[4644]: I1213 
Dec 13 06:49:54 crc kubenswrapper[4644]: I1213 06:49:54.895829 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 13 06:49:55 crc kubenswrapper[4644]: I1213 06:49:55.092733 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Dec 13 06:49:55 crc kubenswrapper[4644]: I1213 06:49:55.099053 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Dec 13 06:49:55 crc kubenswrapper[4644]: I1213 06:49:55.173826 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 13 06:49:55 crc kubenswrapper[4644]: I1213 06:49:55.229227 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 13 06:49:55 crc kubenswrapper[4644]: I1213 06:49:55.389050 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Dec 13 06:49:55 crc kubenswrapper[4644]: I1213 06:49:55.688433 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Dec 13 06:49:55 crc kubenswrapper[4644]: I1213 06:49:55.735258 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 13 06:49:55 crc kubenswrapper[4644]: I1213 06:49:55.782701 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Dec 13 06:49:55 crc kubenswrapper[4644]: I1213 06:49:55.933303 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 13 06:49:55 crc kubenswrapper[4644]: I1213 06:49:55.948618 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 13 06:49:56 crc kubenswrapper[4644]: I1213 06:49:56.697156 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 13 06:49:56 crc kubenswrapper[4644]: I1213 06:49:56.791145 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Dec 13 06:49:56 crc kubenswrapper[4644]: I1213 06:49:56.829636 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Dec 13 06:49:56 crc kubenswrapper[4644]: I1213 06:49:56.983566 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Dec 13 06:49:57 crc kubenswrapper[4644]: I1213 06:49:57.000436 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 13 06:49:57 crc kubenswrapper[4644]: I1213 06:49:57.068954 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Dec 13 06:49:57 crc kubenswrapper[4644]: I1213 06:49:57.149715 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 13 06:49:57 crc kubenswrapper[4644]: I1213 06:49:57.275113 4644 reflector.go:368] Caches populated for *v1.Secret from
object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 13 06:49:57 crc kubenswrapper[4644]: I1213 06:49:57.403810 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 13 06:49:57 crc kubenswrapper[4644]: I1213 06:49:57.436630 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 13 06:49:57 crc kubenswrapper[4644]: I1213 06:49:57.457650 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 13 06:49:57 crc kubenswrapper[4644]: I1213 06:49:57.696407 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 13 06:49:57 crc kubenswrapper[4644]: I1213 06:49:57.697141 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 13 06:49:57 crc kubenswrapper[4644]: I1213 06:49:57.711534 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 13 06:49:57 crc kubenswrapper[4644]: I1213 06:49:57.716996 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 13 06:49:57 crc kubenswrapper[4644]: I1213 06:49:57.817544 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 13 06:49:57 crc kubenswrapper[4644]: I1213 06:49:57.829934 4644 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 13 06:49:57 crc kubenswrapper[4644]: I1213 06:49:57.952482 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 13 06:49:58 crc kubenswrapper[4644]: I1213 06:49:58.241335 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 13 06:49:58 crc kubenswrapper[4644]: I1213 06:49:58.269545 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 13 06:49:58 crc kubenswrapper[4644]: I1213 06:49:58.407057 4644 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 13 06:49:58 crc kubenswrapper[4644]: I1213 06:49:58.443008 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 13 06:49:58 crc kubenswrapper[4644]: I1213 06:49:58.497257 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 13 06:49:58 crc kubenswrapper[4644]: I1213 06:49:58.617423 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 13 06:49:58 crc kubenswrapper[4644]: I1213 06:49:58.688125 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 13 06:49:58 crc kubenswrapper[4644]: I1213 06:49:58.728240 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 13 06:49:59 crc kubenswrapper[4644]: I1213 06:49:59.138624 4644 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 13 06:49:59 crc kubenswrapper[4644]: I1213 06:49:59.208269 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 13 06:49:59 crc kubenswrapper[4644]: I1213 06:49:59.258781 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 13 06:49:59 crc kubenswrapper[4644]: I1213 06:49:59.375616 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 13 06:49:59 crc kubenswrapper[4644]: I1213 06:49:59.446889 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 13 06:49:59 crc kubenswrapper[4644]: I1213 06:49:59.450898 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 13 06:49:59 crc kubenswrapper[4644]: I1213 06:49:59.482146 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 13 06:49:59 crc kubenswrapper[4644]: I1213 06:49:59.549126 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 13 06:49:59 crc kubenswrapper[4644]: I1213 06:49:59.595368 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 13 06:49:59 crc kubenswrapper[4644]: I1213 06:49:59.630371 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 13 06:49:59 crc kubenswrapper[4644]: I1213 06:49:59.668538 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 13 06:49:59 crc kubenswrapper[4644]: I1213 06:49:59.779940 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 13 06:49:59 crc kubenswrapper[4644]: I1213 06:49:59.916051 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 13 06:49:59 crc kubenswrapper[4644]: I1213 06:49:59.993930 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.039548 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.152659 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.185410 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.207010 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.232196 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.277313 4644 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.325134 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.325920 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.373822 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.417899 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.448276 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.477048 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.510724 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.543063 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.550515 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.623347 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.635383 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.695851 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.730042 4644 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.754990 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.786105 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.822220 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.891814 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.939011 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 
06:50:00.952803 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.974589 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 13 06:50:00 crc kubenswrapper[4644]: I1213 06:50:00.981219 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.010910 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.059879 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.076239 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.077296 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.175939 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.199276 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.249214 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.275894 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.300125 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.316570 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.336267 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.442001 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.455818 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.492325 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.508004 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.545674 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 
06:50:01.590486 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.669621 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.726638 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.728053 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.750161 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.754380 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.828219 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.836957 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.845948 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.885174 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.949778 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.979206 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 13 06:50:01 crc kubenswrapper[4644]: I1213 06:50:01.986049 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.000017 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.000254 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.013722 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.027827 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.131292 4644 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.144839 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.158664 4644 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.208255 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.208333 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.211325 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.304388 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.305797 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.315062 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.332391 4644 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.332426 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.332923 4644 scope.go:117] "RemoveContainer" containerID="41d9f431a026e16bcd1a0261f98a3d8767123286e6f8ee303dfbe8fd087f7a1c" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.349189 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.399830 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.534201 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.599202 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.680040 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.707267 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.714243 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.739435 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.747585 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.812701 4644 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.823653 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.845588 4644 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.848828 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-crmqj","openshift-marketplace/redhat-marketplace-pngbm","openshift-marketplace/community-operators-2bmvc","openshift-marketplace/redhat-operators-gqhfh","openshift-kube-apiserver/kube-apiserver-crc","openshift-marketplace/certified-operators-54mm5"] Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.848894 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.848913 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gb9x7"] Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.852093 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.852617 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.856520 4644 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.865332 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=18.865320248 podStartE2EDuration="18.865320248s" podCreationTimestamp="2025-12-13 06:49:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:50:02.862873316 +0000 UTC m=+265.077824150" watchObservedRunningTime="2025-12-13 06:50:02.865320248 +0000 UTC m=+265.080271082" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.887062 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.897758 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.899584 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-gb9x7_33f05cad-7917-4d9a-870d-b68d4388bbde/marketplace-operator/2.log" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.900095 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-gb9x7_33f05cad-7917-4d9a-870d-b68d4388bbde/marketplace-operator/1.log" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.900144 4644 generic.go:334] "Generic (PLEG): container finished" podID="33f05cad-7917-4d9a-870d-b68d4388bbde" containerID="b01e38e424f13d6b8a58909a37ab8d7d74b29ab14fa743d66938d6a47dceb6c9" exitCode=1 Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.900309 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" event={"ID":"33f05cad-7917-4d9a-870d-b68d4388bbde","Type":"ContainerDied","Data":"b01e38e424f13d6b8a58909a37ab8d7d74b29ab14fa743d66938d6a47dceb6c9"} Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.900763 4644 scope.go:117] "RemoveContainer" containerID="b01e38e424f13d6b8a58909a37ab8d7d74b29ab14fa743d66938d6a47dceb6c9" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.901072 4644 scope.go:117] "RemoveContainer" containerID="41d9f431a026e16bcd1a0261f98a3d8767123286e6f8ee303dfbe8fd087f7a1c" Dec 13 06:50:02 crc kubenswrapper[4644]: E1213 06:50:02.901075 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-gb9x7_openshift-marketplace(33f05cad-7917-4d9a-870d-b68d4388bbde)\"" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" podUID="33f05cad-7917-4d9a-870d-b68d4388bbde" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.947142 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.969383 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.984569 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 13 06:50:02 crc kubenswrapper[4644]: I1213 06:50:02.999041 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.042268 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.060824 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.085722 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.095883 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.124817 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.133419 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.307996 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.334043 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.371598 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.399730 4644 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.530655 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.538472 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.666409 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.722149 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.729540 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.736525 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.740574 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.815648 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.851723 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.867358 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.898086 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.904328 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-gb9x7_33f05cad-7917-4d9a-870d-b68d4388bbde/marketplace-operator/2.log" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.904850 4644 scope.go:117] "RemoveContainer" containerID="b01e38e424f13d6b8a58909a37ab8d7d74b29ab14fa743d66938d6a47dceb6c9" Dec 13 06:50:03 crc kubenswrapper[4644]: E1213 06:50:03.905012 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-gb9x7_openshift-marketplace(33f05cad-7917-4d9a-870d-b68d4388bbde)\"" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" podUID="33f05cad-7917-4d9a-870d-b68d4388bbde" Dec 13 06:50:03 crc kubenswrapper[4644]: I1213 06:50:03.944074 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 13 06:50:04 crc kubenswrapper[4644]: I1213 06:50:04.130782 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 13 06:50:04 crc kubenswrapper[4644]: I1213 06:50:04.154342 4644 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 13 06:50:04 crc kubenswrapper[4644]: I1213 06:50:04.322400 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 13 06:50:04 crc kubenswrapper[4644]: I1213 06:50:04.351481 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 13 06:50:04 crc kubenswrapper[4644]: I1213 06:50:04.390135 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 13 06:50:04 crc kubenswrapper[4644]: I1213 06:50:04.394137 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" path="/var/lib/kubelet/pods/10291551-2baf-4271-bc49-6a40e5ceb94b/volumes" Dec 13 06:50:04 crc kubenswrapper[4644]: I1213 06:50:04.394752 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b355a1b-28e0-462c-a1ef-43eea6341565" path="/var/lib/kubelet/pods/4b355a1b-28e0-462c-a1ef-43eea6341565/volumes" Dec 13 06:50:04 crc kubenswrapper[4644]: I1213 06:50:04.395300 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" path="/var/lib/kubelet/pods/5de2f44b-564a-461c-b9e1-b4b306d8ecb1/volumes" Dec 13 06:50:04 crc kubenswrapper[4644]: I1213 06:50:04.396117 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" path="/var/lib/kubelet/pods/8b4d6920-0938-4fcf-a825-bfaec69da684/volumes" Dec 13 06:50:04 crc kubenswrapper[4644]: I1213 06:50:04.396671 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" path="/var/lib/kubelet/pods/e9452a17-eda0-4e66-bf4f-30e6e8ac8693/volumes" Dec 13 06:50:04 crc kubenswrapper[4644]: I1213 06:50:04.404400 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 13 06:50:04 crc kubenswrapper[4644]: I1213 06:50:04.407013 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 13 06:50:04 crc kubenswrapper[4644]: I1213 06:50:04.424036 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 13 06:50:04 crc kubenswrapper[4644]: I1213 06:50:04.658424 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 13 06:50:04 crc kubenswrapper[4644]: I1213 06:50:04.883047 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 13 06:50:04 crc kubenswrapper[4644]: I1213 06:50:04.978817 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 13 06:50:05 crc kubenswrapper[4644]: I1213 06:50:05.145798 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 13 06:50:05 crc kubenswrapper[4644]: I1213 06:50:05.296024 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 13 06:50:05 crc kubenswrapper[4644]: I1213 06:50:05.388348 4644 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-console-operator"/"serving-cert" Dec 13 06:50:05 crc kubenswrapper[4644]: I1213 06:50:05.400874 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 13 06:50:05 crc kubenswrapper[4644]: I1213 06:50:05.406984 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 13 06:50:05 crc kubenswrapper[4644]: I1213 06:50:05.446359 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 13 06:50:05 crc kubenswrapper[4644]: I1213 06:50:05.498476 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 13 06:50:05 crc kubenswrapper[4644]: I1213 06:50:05.611969 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 13 06:50:05 crc kubenswrapper[4644]: I1213 06:50:05.620162 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 13 06:50:05 crc kubenswrapper[4644]: I1213 06:50:05.759180 4644 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 13 06:50:05 crc kubenswrapper[4644]: I1213 06:50:05.759374 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://a167f0d33bb69e726768ada9b577a95f3724b31298f56775c70cf1ad36f08b86" gracePeriod=5 Dec 13 06:50:05 crc kubenswrapper[4644]: I1213 06:50:05.779203 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 13 06:50:05 crc kubenswrapper[4644]: I1213 06:50:05.814425 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 13 06:50:05 crc kubenswrapper[4644]: I1213 06:50:05.940456 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 13 06:50:06 crc kubenswrapper[4644]: I1213 06:50:06.130556 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 13 06:50:06 crc kubenswrapper[4644]: I1213 06:50:06.155255 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 13 06:50:06 crc kubenswrapper[4644]: I1213 06:50:06.203697 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 13 06:50:06 crc kubenswrapper[4644]: I1213 06:50:06.229154 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 13 06:50:06 crc kubenswrapper[4644]: I1213 06:50:06.270159 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 13 06:50:06 crc kubenswrapper[4644]: I1213 06:50:06.326480 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 13 06:50:06 crc kubenswrapper[4644]: I1213 06:50:06.398694 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 13 
06:50:06 crc kubenswrapper[4644]: I1213 06:50:06.441171 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 13 06:50:06 crc kubenswrapper[4644]: I1213 06:50:06.452751 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 13 06:50:06 crc kubenswrapper[4644]: I1213 06:50:06.590251 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 13 06:50:06 crc kubenswrapper[4644]: I1213 06:50:06.729783 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 13 06:50:06 crc kubenswrapper[4644]: I1213 06:50:06.736962 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 13 06:50:06 crc kubenswrapper[4644]: I1213 06:50:06.782257 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 13 06:50:06 crc kubenswrapper[4644]: I1213 06:50:06.828516 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 13 06:50:06 crc kubenswrapper[4644]: I1213 06:50:06.922317 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 13 06:50:06 crc kubenswrapper[4644]: I1213 06:50:06.975281 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 13 06:50:07 crc kubenswrapper[4644]: I1213 06:50:07.109959 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 13 06:50:07 crc kubenswrapper[4644]: I1213 06:50:07.127274 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 13 06:50:07 crc kubenswrapper[4644]: I1213 06:50:07.220639 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 13 06:50:07 crc kubenswrapper[4644]: I1213 06:50:07.263575 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 13 06:50:07 crc kubenswrapper[4644]: I1213 06:50:07.312512 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 13 06:50:07 crc kubenswrapper[4644]: I1213 06:50:07.439000 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 13 06:50:07 crc kubenswrapper[4644]: I1213 06:50:07.443530 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 13 06:50:07 crc kubenswrapper[4644]: I1213 06:50:07.448666 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 13 06:50:07 crc kubenswrapper[4644]: I1213 06:50:07.521089 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 13 06:50:07 crc kubenswrapper[4644]: I1213 06:50:07.577605 4644 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns-operator"/"metrics-tls" Dec 13 06:50:07 crc kubenswrapper[4644]: I1213 06:50:07.666779 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 13 06:50:07 crc kubenswrapper[4644]: I1213 06:50:07.678642 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 13 06:50:07 crc kubenswrapper[4644]: I1213 06:50:07.841664 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 13 06:50:07 crc kubenswrapper[4644]: I1213 06:50:07.864799 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 13 06:50:07 crc kubenswrapper[4644]: I1213 06:50:07.890273 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 13 06:50:07 crc kubenswrapper[4644]: I1213 06:50:07.962458 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 13 06:50:07 crc kubenswrapper[4644]: I1213 06:50:07.962638 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 13 06:50:08 crc kubenswrapper[4644]: I1213 06:50:08.031678 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 13 06:50:08 crc kubenswrapper[4644]: I1213 06:50:08.048168 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 13 06:50:08 crc kubenswrapper[4644]: I1213 06:50:08.048369 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 13 06:50:08 crc kubenswrapper[4644]: I1213 06:50:08.150703 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 13 06:50:08 crc kubenswrapper[4644]: I1213 06:50:08.187007 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 13 06:50:08 crc kubenswrapper[4644]: I1213 06:50:08.237849 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 13 06:50:08 crc kubenswrapper[4644]: I1213 06:50:08.463325 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 13 06:50:08 crc kubenswrapper[4644]: I1213 06:50:08.701379 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 13 06:50:08 crc kubenswrapper[4644]: I1213 06:50:08.755855 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 13 06:50:08 crc kubenswrapper[4644]: I1213 06:50:08.797320 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 13 06:50:09 crc kubenswrapper[4644]: I1213 06:50:09.038061 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 13 06:50:09 crc kubenswrapper[4644]: I1213 06:50:09.078132 4644 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 13 06:50:09 crc kubenswrapper[4644]: I1213 06:50:09.330553 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 13 06:50:09 crc kubenswrapper[4644]: I1213 06:50:09.455810 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 13 06:50:09 crc kubenswrapper[4644]: I1213 06:50:09.558124 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 13 06:50:09 crc kubenswrapper[4644]: I1213 06:50:09.654962 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 13 06:50:09 crc kubenswrapper[4644]: I1213 06:50:09.724854 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 13 06:50:09 crc kubenswrapper[4644]: I1213 06:50:09.778912 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 13 06:50:09 crc kubenswrapper[4644]: I1213 06:50:09.849033 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 13 06:50:09 crc kubenswrapper[4644]: I1213 06:50:09.945985 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 13 06:50:09 crc kubenswrapper[4644]: I1213 06:50:09.959587 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 13 06:50:10 crc kubenswrapper[4644]: I1213 06:50:10.142039 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 13 06:50:10 crc kubenswrapper[4644]: I1213 06:50:10.456244 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 13 06:50:10 crc kubenswrapper[4644]: I1213 06:50:10.472151 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 13 06:50:10 crc kubenswrapper[4644]: I1213 06:50:10.567050 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 13 06:50:10 crc kubenswrapper[4644]: I1213 06:50:10.591880 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 13 06:50:10 crc kubenswrapper[4644]: I1213 06:50:10.931133 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 13 06:50:10 crc kubenswrapper[4644]: I1213 06:50:10.931187 4644 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="a167f0d33bb69e726768ada9b577a95f3724b31298f56775c70cf1ad36f08b86" exitCode=137 Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.319719 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.319953 4644 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.353099 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.353166 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.353196 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.353227 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.353264 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.353512 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.353549 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.353568 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.353585 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.363978 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.454319 4644 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.454354 4644 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.454365 4644 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.454373 4644 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.454381 4644 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.938418 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.938549 4644 scope.go:117] "RemoveContainer" containerID="a167f0d33bb69e726768ada9b577a95f3724b31298f56775c70cf1ad36f08b86" Dec 13 06:50:11 crc kubenswrapper[4644]: I1213 06:50:11.938630 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 06:50:12 crc kubenswrapper[4644]: I1213 06:50:12.332573 4644 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:50:12 crc kubenswrapper[4644]: I1213 06:50:12.332630 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:50:12 crc kubenswrapper[4644]: I1213 06:50:12.333269 4644 scope.go:117] "RemoveContainer" containerID="b01e38e424f13d6b8a58909a37ab8d7d74b29ab14fa743d66938d6a47dceb6c9" Dec 13 06:50:12 crc kubenswrapper[4644]: E1213 06:50:12.333583 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-gb9x7_openshift-marketplace(33f05cad-7917-4d9a-870d-b68d4388bbde)\"" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" podUID="33f05cad-7917-4d9a-870d-b68d4388bbde" Dec 13 06:50:12 crc kubenswrapper[4644]: I1213 06:50:12.393389 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 13 06:50:26 crc kubenswrapper[4644]: I1213 06:50:26.389591 4644 scope.go:117] "RemoveContainer" containerID="b01e38e424f13d6b8a58909a37ab8d7d74b29ab14fa743d66938d6a47dceb6c9" Dec 13 06:50:27 crc kubenswrapper[4644]: I1213 06:50:27.004417 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-gb9x7_33f05cad-7917-4d9a-870d-b68d4388bbde/marketplace-operator/2.log" Dec 13 06:50:27 crc kubenswrapper[4644]: I1213 06:50:27.004690 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" event={"ID":"33f05cad-7917-4d9a-870d-b68d4388bbde","Type":"ContainerStarted","Data":"8f67bdd3392b5333bbe15472eab6e305998c7e6d1534ff8354bd748cb3f04c52"} Dec 13 06:50:27 crc kubenswrapper[4644]: I1213 06:50:27.005104 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:50:27 crc kubenswrapper[4644]: I1213 06:50:27.007080 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" Dec 13 06:50:27 crc kubenswrapper[4644]: I1213 06:50:27.018025 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-gb9x7" podStartSLOduration=65.018014539 podStartE2EDuration="1m5.018014539s" podCreationTimestamp="2025-12-13 06:49:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:49:50.855146982 +0000 UTC m=+253.070097815" watchObservedRunningTime="2025-12-13 06:50:27.018014539 +0000 UTC m=+289.232965372" Dec 13 06:51:09 crc kubenswrapper[4644]: I1213 06:51:09.753916 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 06:51:09 crc kubenswrapper[4644]: I1213 
06:51:09.754356 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.196933 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x79lz"] Dec 13 06:51:24 crc kubenswrapper[4644]: E1213 06:51:24.197639 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" containerName="extract-utilities" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.197654 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" containerName="extract-utilities" Dec 13 06:51:24 crc kubenswrapper[4644]: E1213 06:51:24.197661 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" containerName="extract-utilities" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.197666 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" containerName="extract-utilities" Dec 13 06:51:24 crc kubenswrapper[4644]: E1213 06:51:24.197673 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" containerName="registry-server" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.197678 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" containerName="registry-server" Dec 13 06:51:24 crc kubenswrapper[4644]: E1213 06:51:24.197688 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" containerName="extract-content" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.197693 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" containerName="extract-content" Dec 13 06:51:24 crc kubenswrapper[4644]: E1213 06:51:24.197703 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.197709 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 13 06:51:24 crc kubenswrapper[4644]: E1213 06:51:24.197729 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" containerName="extract-content" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.197734 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" containerName="extract-content" Dec 13 06:51:24 crc kubenswrapper[4644]: E1213 06:51:24.197743 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" containerName="registry-server" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.197748 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" containerName="registry-server" Dec 13 06:51:24 crc kubenswrapper[4644]: E1213 06:51:24.197756 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" containerName="registry-server" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 
06:51:24.197762 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" containerName="registry-server" Dec 13 06:51:24 crc kubenswrapper[4644]: E1213 06:51:24.197770 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" containerName="extract-content" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.197775 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" containerName="extract-content" Dec 13 06:51:24 crc kubenswrapper[4644]: E1213 06:51:24.197784 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9beaac24-f4b1-461d-94d0-d798a89ce9a0" containerName="installer" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.197789 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="9beaac24-f4b1-461d-94d0-d798a89ce9a0" containerName="installer" Dec 13 06:51:24 crc kubenswrapper[4644]: E1213 06:51:24.197798 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" containerName="marketplace-operator" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.197803 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" containerName="marketplace-operator" Dec 13 06:51:24 crc kubenswrapper[4644]: E1213 06:51:24.197811 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" containerName="extract-utilities" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.197817 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" containerName="extract-utilities" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.197899 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9452a17-eda0-4e66-bf4f-30e6e8ac8693" containerName="registry-server" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.197909 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.197919 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="9beaac24-f4b1-461d-94d0-d798a89ce9a0" containerName="installer" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.197927 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b4d6920-0938-4fcf-a825-bfaec69da684" containerName="registry-server" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.197935 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="5de2f44b-564a-461c-b9e1-b4b306d8ecb1" containerName="marketplace-operator" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.197942 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="10291551-2baf-4271-bc49-6a40e5ceb94b" containerName="registry-server"
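The paired cpu_manager.go:410 / state_mem.go:107 lines (and the memory_manager.go:354 lines) above show the kubelet's resource managers sweeping checkpointed per-container assignments that belong to pods which no longer exist, just before admitting the new catalog pod. A simplified model of that state and sweep; the types, fields, and CPU-set representation here are illustrative stand-ins, not the kubelet's own:

    package main

    import "fmt"

    // staleSweeper is a toy stand-in for the checkpointed CPU-manager state:
    // podUID -> containerName -> assigned CPU set (the real kubelet uses a
    // dedicated CPUSet type, not a string).
    type staleSweeper struct {
        assignments map[string]map[string]string
    }

    // removeStaleState mirrors the log pattern above: for every checkpointed
    // pod that is no longer active, log and delete each container assignment.
    func (s *staleSweeper) removeStaleState(activePods map[string]bool) {
        for podUID, containers := range s.assignments {
            if activePods[podUID] {
                continue
            }
            for name := range containers {
                fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", podUID, name)
                delete(containers, name)
            }
            delete(s.assignments, podUID)
        }
    }

    func main() {
        s := &staleSweeper{assignments: map[string]map[string]string{
            "10291551-2baf-4271-bc49-6a40e5ceb94b": {"registry-server": "0-1"},
        }}
        s.removeStaleState(map[string]bool{}) // nothing active: entry is swept
    }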
Need to start a new one" pod="openshift-marketplace/certified-operators-x79lz" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.200251 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.205058 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x79lz"] Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.344490 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbn9c\" (UniqueName: \"kubernetes.io/projected/26b8eb80-532a-4b48-8c6f-e68f835f94e0-kube-api-access-cbn9c\") pod \"certified-operators-x79lz\" (UID: \"26b8eb80-532a-4b48-8c6f-e68f835f94e0\") " pod="openshift-marketplace/certified-operators-x79lz" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.344551 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26b8eb80-532a-4b48-8c6f-e68f835f94e0-catalog-content\") pod \"certified-operators-x79lz\" (UID: \"26b8eb80-532a-4b48-8c6f-e68f835f94e0\") " pod="openshift-marketplace/certified-operators-x79lz" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.344952 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26b8eb80-532a-4b48-8c6f-e68f835f94e0-utilities\") pod \"certified-operators-x79lz\" (UID: \"26b8eb80-532a-4b48-8c6f-e68f835f94e0\") " pod="openshift-marketplace/certified-operators-x79lz" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.395075 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wmj6h"] Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.396131 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wmj6h" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.400031 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.404430 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wmj6h"] Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.446026 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26b8eb80-532a-4b48-8c6f-e68f835f94e0-utilities\") pod \"certified-operators-x79lz\" (UID: \"26b8eb80-532a-4b48-8c6f-e68f835f94e0\") " pod="openshift-marketplace/certified-operators-x79lz" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.446103 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbn9c\" (UniqueName: \"kubernetes.io/projected/26b8eb80-532a-4b48-8c6f-e68f835f94e0-kube-api-access-cbn9c\") pod \"certified-operators-x79lz\" (UID: \"26b8eb80-532a-4b48-8c6f-e68f835f94e0\") " pod="openshift-marketplace/certified-operators-x79lz" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.446129 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26b8eb80-532a-4b48-8c6f-e68f835f94e0-catalog-content\") pod \"certified-operators-x79lz\" (UID: \"26b8eb80-532a-4b48-8c6f-e68f835f94e0\") " pod="openshift-marketplace/certified-operators-x79lz" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.446818 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26b8eb80-532a-4b48-8c6f-e68f835f94e0-utilities\") pod \"certified-operators-x79lz\" (UID: \"26b8eb80-532a-4b48-8c6f-e68f835f94e0\") " pod="openshift-marketplace/certified-operators-x79lz" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.446833 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26b8eb80-532a-4b48-8c6f-e68f835f94e0-catalog-content\") pod \"certified-operators-x79lz\" (UID: \"26b8eb80-532a-4b48-8c6f-e68f835f94e0\") " pod="openshift-marketplace/certified-operators-x79lz" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.470245 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbn9c\" (UniqueName: \"kubernetes.io/projected/26b8eb80-532a-4b48-8c6f-e68f835f94e0-kube-api-access-cbn9c\") pod \"certified-operators-x79lz\" (UID: \"26b8eb80-532a-4b48-8c6f-e68f835f94e0\") " pod="openshift-marketplace/certified-operators-x79lz" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.515394 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x79lz" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.547289 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e17683d9-a928-4409-ad53-736c7c243d29-utilities\") pod \"community-operators-wmj6h\" (UID: \"e17683d9-a928-4409-ad53-736c7c243d29\") " pod="openshift-marketplace/community-operators-wmj6h" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.547362 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf5ms\" (UniqueName: \"kubernetes.io/projected/e17683d9-a928-4409-ad53-736c7c243d29-kube-api-access-zf5ms\") pod \"community-operators-wmj6h\" (UID: \"e17683d9-a928-4409-ad53-736c7c243d29\") " pod="openshift-marketplace/community-operators-wmj6h" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.547432 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e17683d9-a928-4409-ad53-736c7c243d29-catalog-content\") pod \"community-operators-wmj6h\" (UID: \"e17683d9-a928-4409-ad53-736c7c243d29\") " pod="openshift-marketplace/community-operators-wmj6h" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.648265 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e17683d9-a928-4409-ad53-736c7c243d29-utilities\") pod \"community-operators-wmj6h\" (UID: \"e17683d9-a928-4409-ad53-736c7c243d29\") " pod="openshift-marketplace/community-operators-wmj6h" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.648637 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf5ms\" (UniqueName: \"kubernetes.io/projected/e17683d9-a928-4409-ad53-736c7c243d29-kube-api-access-zf5ms\") pod \"community-operators-wmj6h\" (UID: \"e17683d9-a928-4409-ad53-736c7c243d29\") " pod="openshift-marketplace/community-operators-wmj6h" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.648691 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e17683d9-a928-4409-ad53-736c7c243d29-catalog-content\") pod \"community-operators-wmj6h\" (UID: \"e17683d9-a928-4409-ad53-736c7c243d29\") " pod="openshift-marketplace/community-operators-wmj6h" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.649258 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e17683d9-a928-4409-ad53-736c7c243d29-utilities\") pod \"community-operators-wmj6h\" (UID: \"e17683d9-a928-4409-ad53-736c7c243d29\") " pod="openshift-marketplace/community-operators-wmj6h" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.649312 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e17683d9-a928-4409-ad53-736c7c243d29-catalog-content\") pod \"community-operators-wmj6h\" (UID: \"e17683d9-a928-4409-ad53-736c7c243d29\") " pod="openshift-marketplace/community-operators-wmj6h" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.663763 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf5ms\" (UniqueName: \"kubernetes.io/projected/e17683d9-a928-4409-ad53-736c7c243d29-kube-api-access-zf5ms\") pod 
\"community-operators-wmj6h\" (UID: \"e17683d9-a928-4409-ad53-736c7c243d29\") " pod="openshift-marketplace/community-operators-wmj6h" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.710625 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wmj6h" Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.874581 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wmj6h"] Dec 13 06:51:24 crc kubenswrapper[4644]: I1213 06:51:24.880167 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x79lz"] Dec 13 06:51:24 crc kubenswrapper[4644]: W1213 06:51:24.882926 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod26b8eb80_532a_4b48_8c6f_e68f835f94e0.slice/crio-10a050e1b12ac6650a1a8a7dc9179295a69cc65ce98c11b351062b64a0129eab WatchSource:0}: Error finding container 10a050e1b12ac6650a1a8a7dc9179295a69cc65ce98c11b351062b64a0129eab: Status 404 returned error can't find the container with id 10a050e1b12ac6650a1a8a7dc9179295a69cc65ce98c11b351062b64a0129eab Dec 13 06:51:25 crc kubenswrapper[4644]: I1213 06:51:25.259127 4644 generic.go:334] "Generic (PLEG): container finished" podID="26b8eb80-532a-4b48-8c6f-e68f835f94e0" containerID="61f212aaaa26e19d2ca1bbfffb702ad7207dc0338806a7a2bc9507cfc1172734" exitCode=0 Dec 13 06:51:25 crc kubenswrapper[4644]: I1213 06:51:25.259204 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x79lz" event={"ID":"26b8eb80-532a-4b48-8c6f-e68f835f94e0","Type":"ContainerDied","Data":"61f212aaaa26e19d2ca1bbfffb702ad7207dc0338806a7a2bc9507cfc1172734"} Dec 13 06:51:25 crc kubenswrapper[4644]: I1213 06:51:25.259470 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x79lz" event={"ID":"26b8eb80-532a-4b48-8c6f-e68f835f94e0","Type":"ContainerStarted","Data":"10a050e1b12ac6650a1a8a7dc9179295a69cc65ce98c11b351062b64a0129eab"} Dec 13 06:51:25 crc kubenswrapper[4644]: I1213 06:51:25.261464 4644 generic.go:334] "Generic (PLEG): container finished" podID="e17683d9-a928-4409-ad53-736c7c243d29" containerID="3a819f98b5347f09d3bb2bd5b518c9763c01c52b607ba1bf72bca80dfaebc2e8" exitCode=0 Dec 13 06:51:25 crc kubenswrapper[4644]: I1213 06:51:25.261493 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wmj6h" event={"ID":"e17683d9-a928-4409-ad53-736c7c243d29","Type":"ContainerDied","Data":"3a819f98b5347f09d3bb2bd5b518c9763c01c52b607ba1bf72bca80dfaebc2e8"} Dec 13 06:51:25 crc kubenswrapper[4644]: I1213 06:51:25.261516 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wmj6h" event={"ID":"e17683d9-a928-4409-ad53-736c7c243d29","Type":"ContainerStarted","Data":"ea075e4a9f5bb28a14a03fc2e0a0fd8e9dbad6b6e380979453c95e0ae79aa0b2"} Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.267818 4644 generic.go:334] "Generic (PLEG): container finished" podID="26b8eb80-532a-4b48-8c6f-e68f835f94e0" containerID="0e1267ba59d1e69a75db3c1a4fc8ea6fd6d11db2d4e756be2b959d8ce5e49ba4" exitCode=0 Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.267915 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x79lz" 
event={"ID":"26b8eb80-532a-4b48-8c6f-e68f835f94e0","Type":"ContainerDied","Data":"0e1267ba59d1e69a75db3c1a4fc8ea6fd6d11db2d4e756be2b959d8ce5e49ba4"} Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.273053 4644 generic.go:334] "Generic (PLEG): container finished" podID="e17683d9-a928-4409-ad53-736c7c243d29" containerID="7295c88ce97145e443a515200795bd59fee46e28a29af17f61316adb44e6afae" exitCode=0 Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.273096 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wmj6h" event={"ID":"e17683d9-a928-4409-ad53-736c7c243d29","Type":"ContainerDied","Data":"7295c88ce97145e443a515200795bd59fee46e28a29af17f61316adb44e6afae"} Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.600329 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mfrqg"] Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.601193 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mfrqg" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.603528 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.620213 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mfrqg"] Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.678079 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38f2631f-f251-4435-aa08-588b1586ae3a-utilities\") pod \"redhat-marketplace-mfrqg\" (UID: \"38f2631f-f251-4435-aa08-588b1586ae3a\") " pod="openshift-marketplace/redhat-marketplace-mfrqg" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.678177 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38f2631f-f251-4435-aa08-588b1586ae3a-catalog-content\") pod \"redhat-marketplace-mfrqg\" (UID: \"38f2631f-f251-4435-aa08-588b1586ae3a\") " pod="openshift-marketplace/redhat-marketplace-mfrqg" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.678240 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxl2q\" (UniqueName: \"kubernetes.io/projected/38f2631f-f251-4435-aa08-588b1586ae3a-kube-api-access-xxl2q\") pod \"redhat-marketplace-mfrqg\" (UID: \"38f2631f-f251-4435-aa08-588b1586ae3a\") " pod="openshift-marketplace/redhat-marketplace-mfrqg" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.780116 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxl2q\" (UniqueName: \"kubernetes.io/projected/38f2631f-f251-4435-aa08-588b1586ae3a-kube-api-access-xxl2q\") pod \"redhat-marketplace-mfrqg\" (UID: \"38f2631f-f251-4435-aa08-588b1586ae3a\") " pod="openshift-marketplace/redhat-marketplace-mfrqg" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.780578 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38f2631f-f251-4435-aa08-588b1586ae3a-utilities\") pod \"redhat-marketplace-mfrqg\" (UID: \"38f2631f-f251-4435-aa08-588b1586ae3a\") " pod="openshift-marketplace/redhat-marketplace-mfrqg" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.780656 4644 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38f2631f-f251-4435-aa08-588b1586ae3a-catalog-content\") pod \"redhat-marketplace-mfrqg\" (UID: \"38f2631f-f251-4435-aa08-588b1586ae3a\") " pod="openshift-marketplace/redhat-marketplace-mfrqg" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.781088 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38f2631f-f251-4435-aa08-588b1586ae3a-utilities\") pod \"redhat-marketplace-mfrqg\" (UID: \"38f2631f-f251-4435-aa08-588b1586ae3a\") " pod="openshift-marketplace/redhat-marketplace-mfrqg" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.781155 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38f2631f-f251-4435-aa08-588b1586ae3a-catalog-content\") pod \"redhat-marketplace-mfrqg\" (UID: \"38f2631f-f251-4435-aa08-588b1586ae3a\") " pod="openshift-marketplace/redhat-marketplace-mfrqg" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.797123 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ng2d9"] Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.798271 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ng2d9" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.799089 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxl2q\" (UniqueName: \"kubernetes.io/projected/38f2631f-f251-4435-aa08-588b1586ae3a-kube-api-access-xxl2q\") pod \"redhat-marketplace-mfrqg\" (UID: \"38f2631f-f251-4435-aa08-588b1586ae3a\") " pod="openshift-marketplace/redhat-marketplace-mfrqg" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.803206 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.807499 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ng2d9"] Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.881397 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4-catalog-content\") pod \"redhat-operators-ng2d9\" (UID: \"c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4\") " pod="openshift-marketplace/redhat-operators-ng2d9" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.881482 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4-utilities\") pod \"redhat-operators-ng2d9\" (UID: \"c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4\") " pod="openshift-marketplace/redhat-operators-ng2d9" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.881605 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-625rn\" (UniqueName: \"kubernetes.io/projected/c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4-kube-api-access-625rn\") pod \"redhat-operators-ng2d9\" (UID: \"c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4\") " pod="openshift-marketplace/redhat-operators-ng2d9" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.912652 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mfrqg" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.983246 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-625rn\" (UniqueName: \"kubernetes.io/projected/c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4-kube-api-access-625rn\") pod \"redhat-operators-ng2d9\" (UID: \"c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4\") " pod="openshift-marketplace/redhat-operators-ng2d9" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.983377 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4-catalog-content\") pod \"redhat-operators-ng2d9\" (UID: \"c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4\") " pod="openshift-marketplace/redhat-operators-ng2d9" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.983455 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4-utilities\") pod \"redhat-operators-ng2d9\" (UID: \"c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4\") " pod="openshift-marketplace/redhat-operators-ng2d9" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.983853 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4-catalog-content\") pod \"redhat-operators-ng2d9\" (UID: \"c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4\") " pod="openshift-marketplace/redhat-operators-ng2d9" Dec 13 06:51:26 crc kubenswrapper[4644]: I1213 06:51:26.983952 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4-utilities\") pod \"redhat-operators-ng2d9\" (UID: \"c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4\") " pod="openshift-marketplace/redhat-operators-ng2d9" Dec 13 06:51:27 crc kubenswrapper[4644]: I1213 06:51:27.002829 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-625rn\" (UniqueName: \"kubernetes.io/projected/c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4-kube-api-access-625rn\") pod \"redhat-operators-ng2d9\" (UID: \"c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4\") " pod="openshift-marketplace/redhat-operators-ng2d9" Dec 13 06:51:27 crc kubenswrapper[4644]: I1213 06:51:27.097826 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mfrqg"] Dec 13 06:51:27 crc kubenswrapper[4644]: W1213 06:51:27.105873 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38f2631f_f251_4435_aa08_588b1586ae3a.slice/crio-d27b9b5a81cf5128c3c4f1352977463673840b192854977d21f036ce6d53d4f3 WatchSource:0}: Error finding container d27b9b5a81cf5128c3c4f1352977463673840b192854977d21f036ce6d53d4f3: Status 404 returned error can't find the container with id d27b9b5a81cf5128c3c4f1352977463673840b192854977d21f036ce6d53d4f3 Dec 13 06:51:27 crc kubenswrapper[4644]: I1213 06:51:27.125915 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ng2d9" Dec 13 06:51:27 crc kubenswrapper[4644]: I1213 06:51:27.280570 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wmj6h" event={"ID":"e17683d9-a928-4409-ad53-736c7c243d29","Type":"ContainerStarted","Data":"28d78b031efc82ab6667110ff0ef02e0f8b978428b686c8c55ec69b36c0bf506"} Dec 13 06:51:27 crc kubenswrapper[4644]: I1213 06:51:27.286113 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x79lz" event={"ID":"26b8eb80-532a-4b48-8c6f-e68f835f94e0","Type":"ContainerStarted","Data":"ccdf3bf7e2b59d7dfae879ecfa7db49877ad5ea22e3c8f637b7bdea4ebfe44e5"} Dec 13 06:51:27 crc kubenswrapper[4644]: I1213 06:51:27.292745 4644 generic.go:334] "Generic (PLEG): container finished" podID="38f2631f-f251-4435-aa08-588b1586ae3a" containerID="b4736c1ba74b1b563e516997de5613e6ed9a096d389c054bbf061a0b1bfd2d5b" exitCode=0 Dec 13 06:51:27 crc kubenswrapper[4644]: I1213 06:51:27.292790 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfrqg" event={"ID":"38f2631f-f251-4435-aa08-588b1586ae3a","Type":"ContainerDied","Data":"b4736c1ba74b1b563e516997de5613e6ed9a096d389c054bbf061a0b1bfd2d5b"} Dec 13 06:51:27 crc kubenswrapper[4644]: I1213 06:51:27.292820 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfrqg" event={"ID":"38f2631f-f251-4435-aa08-588b1586ae3a","Type":"ContainerStarted","Data":"d27b9b5a81cf5128c3c4f1352977463673840b192854977d21f036ce6d53d4f3"} Dec 13 06:51:27 crc kubenswrapper[4644]: I1213 06:51:27.300060 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wmj6h" podStartSLOduration=1.651052534 podStartE2EDuration="3.299719035s" podCreationTimestamp="2025-12-13 06:51:24 +0000 UTC" firstStartedPulling="2025-12-13 06:51:25.26300134 +0000 UTC m=+347.477952173" lastFinishedPulling="2025-12-13 06:51:26.91166784 +0000 UTC m=+349.126618674" observedRunningTime="2025-12-13 06:51:27.298650616 +0000 UTC m=+349.513601449" watchObservedRunningTime="2025-12-13 06:51:27.299719035 +0000 UTC m=+349.514669869" Dec 13 06:51:27 crc kubenswrapper[4644]: I1213 06:51:27.326850 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ng2d9"] Dec 13 06:51:27 crc kubenswrapper[4644]: I1213 06:51:27.336673 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x79lz" podStartSLOduration=1.669538908 podStartE2EDuration="3.336651959s" podCreationTimestamp="2025-12-13 06:51:24 +0000 UTC" firstStartedPulling="2025-12-13 06:51:25.26098678 +0000 UTC m=+347.475937613" lastFinishedPulling="2025-12-13 06:51:26.928099831 +0000 UTC m=+349.143050664" observedRunningTime="2025-12-13 06:51:27.331609594 +0000 UTC m=+349.546560427" watchObservedRunningTime="2025-12-13 06:51:27.336651959 +0000 UTC m=+349.551602792" Dec 13 06:51:28 crc kubenswrapper[4644]: I1213 06:51:28.301021 4644 generic.go:334] "Generic (PLEG): container finished" podID="c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4" containerID="301eebcea8468a7a6a5e929e19f9b0823b4b8aa63bb5e20e463a42df00b0ef87" exitCode=0 Dec 13 06:51:28 crc kubenswrapper[4644]: I1213 06:51:28.301250 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ng2d9" 
event={"ID":"c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4","Type":"ContainerDied","Data":"301eebcea8468a7a6a5e929e19f9b0823b4b8aa63bb5e20e463a42df00b0ef87"} Dec 13 06:51:28 crc kubenswrapper[4644]: I1213 06:51:28.301603 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ng2d9" event={"ID":"c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4","Type":"ContainerStarted","Data":"241143f4f057a7e5b1cf10f545068324311866ec39f2d46f73342e0619e4a0c7"} Dec 13 06:51:29 crc kubenswrapper[4644]: I1213 06:51:29.309218 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ng2d9" event={"ID":"c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4","Type":"ContainerStarted","Data":"8ed36dde1dd001b1c874e6120ed51b184aeaba9678126a7a917debfd0b18a99f"} Dec 13 06:51:29 crc kubenswrapper[4644]: I1213 06:51:29.311583 4644 generic.go:334] "Generic (PLEG): container finished" podID="38f2631f-f251-4435-aa08-588b1586ae3a" containerID="ba9256048d277dc2677498f478f2962a03e05ef9d0caec001c935004f19617f5" exitCode=0 Dec 13 06:51:29 crc kubenswrapper[4644]: I1213 06:51:29.311635 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfrqg" event={"ID":"38f2631f-f251-4435-aa08-588b1586ae3a","Type":"ContainerDied","Data":"ba9256048d277dc2677498f478f2962a03e05ef9d0caec001c935004f19617f5"} Dec 13 06:51:30 crc kubenswrapper[4644]: I1213 06:51:30.317088 4644 generic.go:334] "Generic (PLEG): container finished" podID="c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4" containerID="8ed36dde1dd001b1c874e6120ed51b184aeaba9678126a7a917debfd0b18a99f" exitCode=0 Dec 13 06:51:30 crc kubenswrapper[4644]: I1213 06:51:30.317168 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ng2d9" event={"ID":"c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4","Type":"ContainerDied","Data":"8ed36dde1dd001b1c874e6120ed51b184aeaba9678126a7a917debfd0b18a99f"} Dec 13 06:51:30 crc kubenswrapper[4644]: I1213 06:51:30.320931 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mfrqg" event={"ID":"38f2631f-f251-4435-aa08-588b1586ae3a","Type":"ContainerStarted","Data":"d284ecd694b2b82193c36bf1bd09bdd8de745370174cdc957bcd7c567491b927"} Dec 13 06:51:30 crc kubenswrapper[4644]: I1213 06:51:30.350244 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mfrqg" podStartSLOduration=1.813577804 podStartE2EDuration="4.350222852s" podCreationTimestamp="2025-12-13 06:51:26 +0000 UTC" firstStartedPulling="2025-12-13 06:51:27.29402808 +0000 UTC m=+349.508978913" lastFinishedPulling="2025-12-13 06:51:29.830673128 +0000 UTC m=+352.045623961" observedRunningTime="2025-12-13 06:51:30.348263876 +0000 UTC m=+352.563214709" watchObservedRunningTime="2025-12-13 06:51:30.350222852 +0000 UTC m=+352.565173684" Dec 13 06:51:31 crc kubenswrapper[4644]: I1213 06:51:31.327191 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ng2d9" event={"ID":"c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4","Type":"ContainerStarted","Data":"bac2148a2c415c14e2b6f13923bf7303e06d801968462551ff5ae61b2f98c86b"} Dec 13 06:51:31 crc kubenswrapper[4644]: I1213 06:51:31.358371 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ng2d9" podStartSLOduration=2.770514476 podStartE2EDuration="5.358340329s" podCreationTimestamp="2025-12-13 06:51:26 +0000 UTC" firstStartedPulling="2025-12-13 
06:51:28.303040041 +0000 UTC m=+350.517990874" lastFinishedPulling="2025-12-13 06:51:30.890865893 +0000 UTC m=+353.105816727" observedRunningTime="2025-12-13 06:51:31.343869669 +0000 UTC m=+353.558820502" watchObservedRunningTime="2025-12-13 06:51:31.358340329 +0000 UTC m=+353.573291152" Dec 13 06:51:34 crc kubenswrapper[4644]: I1213 06:51:34.515774 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-x79lz" Dec 13 06:51:34 crc kubenswrapper[4644]: I1213 06:51:34.516037 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-x79lz" Dec 13 06:51:34 crc kubenswrapper[4644]: I1213 06:51:34.554193 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-x79lz" Dec 13 06:51:34 crc kubenswrapper[4644]: I1213 06:51:34.711629 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wmj6h" Dec 13 06:51:34 crc kubenswrapper[4644]: I1213 06:51:34.711690 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wmj6h" Dec 13 06:51:34 crc kubenswrapper[4644]: I1213 06:51:34.743975 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wmj6h" Dec 13 06:51:35 crc kubenswrapper[4644]: I1213 06:51:35.374310 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wmj6h" Dec 13 06:51:35 crc kubenswrapper[4644]: I1213 06:51:35.375193 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-x79lz" Dec 13 06:51:36 crc kubenswrapper[4644]: I1213 06:51:36.913615 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mfrqg" Dec 13 06:51:36 crc kubenswrapper[4644]: I1213 06:51:36.913930 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mfrqg" Dec 13 06:51:36 crc kubenswrapper[4644]: I1213 06:51:36.943742 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mfrqg" Dec 13 06:51:37 crc kubenswrapper[4644]: I1213 06:51:37.126552 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ng2d9" Dec 13 06:51:37 crc kubenswrapper[4644]: I1213 06:51:37.126623 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ng2d9" Dec 13 06:51:37 crc kubenswrapper[4644]: I1213 06:51:37.157848 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ng2d9" Dec 13 06:51:37 crc kubenswrapper[4644]: I1213 06:51:37.386039 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ng2d9" Dec 13 06:51:37 crc kubenswrapper[4644]: I1213 06:51:37.386103 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mfrqg"
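The pod_startup_latency_tracker records above are internally consistent if podStartSLOduration is read as the end-to-end startup duration minus the time spent pulling images (which also explains why it equals podStartE2EDuration in records where both pull timestamps are the zero value 0001-01-01). Checking the redhat-operators-ng2d9 record with its monotonic m=+ offsets:

    package main

    import "fmt"

    func main() {
        // Values copied from the redhat-operators-ng2d9 record above, in seconds.
        const (
            e2e                 = 5.358340329   // podStartE2EDuration
            firstStartedPulling = 350.517990874 // m=+ offset
            lastFinishedPulling = 353.105816727 // m=+ offset
        )
        // SLO duration = end-to-end duration minus image-pull time.
        slo := e2e - (lastFinishedPulling - firstStartedPulling)
        fmt.Printf("podStartSLOduration=%.9f\n", slo) // 2.770514476, matching the log
    }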
Dec 13 06:51:39 crc kubenswrapper[4644]: I1213 06:51:39.753914 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 06:51:39 crc kubenswrapper[4644]: I1213 06:51:39.754228 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.201853 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-7nrp6"] Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.202681 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.209746 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-7nrp6"] Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.306213 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgffb\" (UniqueName: \"kubernetes.io/projected/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-kube-api-access-tgffb\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.306284 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-trusted-ca\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.306473 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-registry-certificates\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.306551 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.306609 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-ca-trust-extracted\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.306648 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-bound-sa-token\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.306840 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-registry-tls\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.306911 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-installation-pull-secrets\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.324215 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.407649 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-registry-tls\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.407689 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-installation-pull-secrets\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.407720 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgffb\" (UniqueName: \"kubernetes.io/projected/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-kube-api-access-tgffb\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.407748 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-trusted-ca\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.407777 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-registry-certificates\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: 
\"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.407798 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-ca-trust-extracted\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.407817 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-bound-sa-token\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.409468 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-ca-trust-extracted\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.409797 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-trusted-ca\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.410019 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-registry-certificates\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.413278 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-installation-pull-secrets\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.413310 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-registry-tls\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.421411 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-bound-sa-token\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.422184 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgffb\" (UniqueName: 
\"kubernetes.io/projected/d9f37b0d-25de-4db4-ac5f-7f2c78394a96-kube-api-access-tgffb\") pod \"image-registry-66df7c8f76-7nrp6\" (UID: \"d9f37b0d-25de-4db4-ac5f-7f2c78394a96\") " pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.516934 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:50 crc kubenswrapper[4644]: I1213 06:51:50.671431 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-7nrp6"] Dec 13 06:51:51 crc kubenswrapper[4644]: I1213 06:51:51.433223 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" event={"ID":"d9f37b0d-25de-4db4-ac5f-7f2c78394a96","Type":"ContainerStarted","Data":"47872d81a460af439f176433f3d2849a04199656e95c9f57cd3c727a24010c12"} Dec 13 06:51:51 crc kubenswrapper[4644]: I1213 06:51:51.433274 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" event={"ID":"d9f37b0d-25de-4db4-ac5f-7f2c78394a96","Type":"ContainerStarted","Data":"ca12a1c6425efc24eb49272ddab17641107487d9d7504ac2cea333b900c836df"} Dec 13 06:51:51 crc kubenswrapper[4644]: I1213 06:51:51.433367 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:51:51 crc kubenswrapper[4644]: I1213 06:51:51.451471 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" podStartSLOduration=1.451453578 podStartE2EDuration="1.451453578s" podCreationTimestamp="2025-12-13 06:51:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:51:51.449632442 +0000 UTC m=+373.664583275" watchObservedRunningTime="2025-12-13 06:51:51.451453578 +0000 UTC m=+373.666404411" Dec 13 06:52:09 crc kubenswrapper[4644]: I1213 06:52:09.753774 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 06:52:09 crc kubenswrapper[4644]: I1213 06:52:09.754306 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 06:52:09 crc kubenswrapper[4644]: I1213 06:52:09.754362 4644 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 06:52:09 crc kubenswrapper[4644]: I1213 06:52:09.755018 4644 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"940bbee9b20bdd362b830e1ec88ca33332cf28bd753c6bedbbb57aa771fa906d"} pod="openshift-machine-config-operator/machine-config-daemon-45tj4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 06:52:09 crc kubenswrapper[4644]: I1213 06:52:09.755075 4644 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" containerID="cri-o://940bbee9b20bdd362b830e1ec88ca33332cf28bd753c6bedbbb57aa771fa906d" gracePeriod=600 Dec 13 06:52:10 crc kubenswrapper[4644]: I1213 06:52:10.519148 4644 generic.go:334] "Generic (PLEG): container finished" podID="48240f19-087e-4597-b448-ab1a190a5027" containerID="940bbee9b20bdd362b830e1ec88ca33332cf28bd753c6bedbbb57aa771fa906d" exitCode=0 Dec 13 06:52:10 crc kubenswrapper[4644]: I1213 06:52:10.519229 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerDied","Data":"940bbee9b20bdd362b830e1ec88ca33332cf28bd753c6bedbbb57aa771fa906d"} Dec 13 06:52:10 crc kubenswrapper[4644]: I1213 06:52:10.519701 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerStarted","Data":"8a334c8ac9cfbcecc238f0daede728d1156a22a87c2dd064837832a2d8e79ebe"} Dec 13 06:52:10 crc kubenswrapper[4644]: I1213 06:52:10.519726 4644 scope.go:117] "RemoveContainer" containerID="cf60d7555e7d2aa08e39b6939322fe7a5a412e4baec77e4a6196bad800e67cb6" Dec 13 06:52:10 crc kubenswrapper[4644]: I1213 06:52:10.520250 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-7nrp6" Dec 13 06:52:10 crc kubenswrapper[4644]: I1213 06:52:10.582658 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2rfck"] Dec 13 06:52:35 crc kubenswrapper[4644]: I1213 06:52:35.608038 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" podUID="79103034-e1a4-44b1-bffc-e9edc76da393" containerName="registry" containerID="cri-o://a0ebfd010b18fafd67ae225a36055c020036ac2e73f433517e061c020436b561" gracePeriod=30
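The two "Killing container with a grace period" records above (gracePeriod=600 for the liveness-failed machine-config-daemon, gracePeriod=30 for the deleted registry) follow the usual termination contract: a polite stop signal, a bounded wait, then a hard kill. A minimal sketch of those semantics against an ordinary child process rather than a real CRI-O container:

    package main

    import (
        "fmt"
        "os/exec"
        "syscall"
        "time"
    )

    // killWithGracePeriod sends SIGTERM, waits up to gracePeriod for the
    // process to exit, and falls back to SIGKILL, mirroring the grace-period
    // behaviour logged above (this is an illustration, not kubelet code).
    func killWithGracePeriod(cmd *exec.Cmd, gracePeriod time.Duration) {
        _ = cmd.Process.Signal(syscall.SIGTERM) // polite stop request
        done := make(chan error, 1)
        go func() { done <- cmd.Wait() }()
        select {
        case <-done:
            fmt.Println("exited within grace period")
        case <-time.After(gracePeriod):
            _ = cmd.Process.Kill() // SIGKILL once the grace period expires
            <-done
            fmt.Println("killed after grace period expired")
        }
    }

    func main() {
        cmd := exec.Command("sleep", "60") // stand-in workload (assumes a Unix-like host)
        if err := cmd.Start(); err != nil {
            panic(err)
        }
        killWithGracePeriod(cmd, 2*time.Second)
    }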
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.021429 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/79103034-e1a4-44b1-bffc-e9edc76da393-ca-trust-extracted\") pod \"79103034-e1a4-44b1-bffc-e9edc76da393\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.021544 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/79103034-e1a4-44b1-bffc-e9edc76da393-trusted-ca\") pod \"79103034-e1a4-44b1-bffc-e9edc76da393\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.021572 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-bound-sa-token\") pod \"79103034-e1a4-44b1-bffc-e9edc76da393\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.021720 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"79103034-e1a4-44b1-bffc-e9edc76da393\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.021744 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/79103034-e1a4-44b1-bffc-e9edc76da393-installation-pull-secrets\") pod \"79103034-e1a4-44b1-bffc-e9edc76da393\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.021772 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhktw\" (UniqueName: \"kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-kube-api-access-zhktw\") pod \"79103034-e1a4-44b1-bffc-e9edc76da393\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.021795 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/79103034-e1a4-44b1-bffc-e9edc76da393-registry-certificates\") pod \"79103034-e1a4-44b1-bffc-e9edc76da393\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.021808 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-registry-tls\") pod \"79103034-e1a4-44b1-bffc-e9edc76da393\" (UID: \"79103034-e1a4-44b1-bffc-e9edc76da393\") " Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.022354 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79103034-e1a4-44b1-bffc-e9edc76da393-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "79103034-e1a4-44b1-bffc-e9edc76da393" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.022461 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79103034-e1a4-44b1-bffc-e9edc76da393-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "79103034-e1a4-44b1-bffc-e9edc76da393" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.022625 4644 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/79103034-e1a4-44b1-bffc-e9edc76da393-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.022658 4644 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/79103034-e1a4-44b1-bffc-e9edc76da393-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.026790 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "79103034-e1a4-44b1-bffc-e9edc76da393" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.027586 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79103034-e1a4-44b1-bffc-e9edc76da393-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "79103034-e1a4-44b1-bffc-e9edc76da393" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.027605 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-kube-api-access-zhktw" (OuterVolumeSpecName: "kube-api-access-zhktw") pod "79103034-e1a4-44b1-bffc-e9edc76da393" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393"). InnerVolumeSpecName "kube-api-access-zhktw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.029915 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "79103034-e1a4-44b1-bffc-e9edc76da393" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.030122 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "79103034-e1a4-44b1-bffc-e9edc76da393" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.036709 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79103034-e1a4-44b1-bffc-e9edc76da393-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "79103034-e1a4-44b1-bffc-e9edc76da393" (UID: "79103034-e1a4-44b1-bffc-e9edc76da393"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.123451 4644 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.123493 4644 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/79103034-e1a4-44b1-bffc-e9edc76da393-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.123507 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhktw\" (UniqueName: \"kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-kube-api-access-zhktw\") on node \"crc\" DevicePath \"\"" Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.123518 4644 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/79103034-e1a4-44b1-bffc-e9edc76da393-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.123528 4644 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/79103034-e1a4-44b1-bffc-e9edc76da393-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.637184 4644 generic.go:334] "Generic (PLEG): container finished" podID="79103034-e1a4-44b1-bffc-e9edc76da393" containerID="a0ebfd010b18fafd67ae225a36055c020036ac2e73f433517e061c020436b561" exitCode=0 Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.637217 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" event={"ID":"79103034-e1a4-44b1-bffc-e9edc76da393","Type":"ContainerDied","Data":"a0ebfd010b18fafd67ae225a36055c020036ac2e73f433517e061c020436b561"} Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.637235 4644 util.go:48] "No ready sandbox for pod can be found. 
Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.637252 4644 scope.go:117] "RemoveContainer" containerID="a0ebfd010b18fafd67ae225a36055c020036ac2e73f433517e061c020436b561"
Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.637242 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-2rfck" event={"ID":"79103034-e1a4-44b1-bffc-e9edc76da393","Type":"ContainerDied","Data":"356d12be20341c27ba13054f30a962732de0049488d0ad48156c6110f84479c7"}
Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.650600 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2rfck"]
Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.651761 4644 scope.go:117] "RemoveContainer" containerID="a0ebfd010b18fafd67ae225a36055c020036ac2e73f433517e061c020436b561"
Dec 13 06:52:36 crc kubenswrapper[4644]: E1213 06:52:36.652186 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0ebfd010b18fafd67ae225a36055c020036ac2e73f433517e061c020436b561\": container with ID starting with a0ebfd010b18fafd67ae225a36055c020036ac2e73f433517e061c020436b561 not found: ID does not exist" containerID="a0ebfd010b18fafd67ae225a36055c020036ac2e73f433517e061c020436b561"
Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.652222 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0ebfd010b18fafd67ae225a36055c020036ac2e73f433517e061c020436b561"} err="failed to get container status \"a0ebfd010b18fafd67ae225a36055c020036ac2e73f433517e061c020436b561\": rpc error: code = NotFound desc = could not find container \"a0ebfd010b18fafd67ae225a36055c020036ac2e73f433517e061c020436b561\": container with ID starting with a0ebfd010b18fafd67ae225a36055c020036ac2e73f433517e061c020436b561 not found: ID does not exist"
Dec 13 06:52:36 crc kubenswrapper[4644]: I1213 06:52:36.653530 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-2rfck"]
Dec 13 06:52:38 crc kubenswrapper[4644]: I1213 06:52:38.400228 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79103034-e1a4-44b1-bffc-e9edc76da393" path="/var/lib/kubelet/pods/79103034-e1a4-44b1-bffc-e9edc76da393/volumes"
Dec 13 06:54:09 crc kubenswrapper[4644]: I1213 06:54:09.754352 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 13 06:54:09 crc kubenswrapper[4644]: I1213 06:54:09.754942 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 13 06:54:39 crc kubenswrapper[4644]: I1213 06:54:39.753555 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 13 06:54:39 crc kubenswrapper[4644]: I1213 06:54:39.753942 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 13 06:55:09 crc kubenswrapper[4644]: I1213 06:55:09.753799 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 13 06:55:09 crc kubenswrapper[4644]: I1213 06:55:09.754142 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 13 06:55:09 crc kubenswrapper[4644]: I1213 06:55:09.754184 4644 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-45tj4"
Dec 13 06:55:09 crc kubenswrapper[4644]: I1213 06:55:09.754703 4644 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8a334c8ac9cfbcecc238f0daede728d1156a22a87c2dd064837832a2d8e79ebe"} pod="openshift-machine-config-operator/machine-config-daemon-45tj4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 13 06:55:09 crc kubenswrapper[4644]: I1213 06:55:09.754757 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" containerID="cri-o://8a334c8ac9cfbcecc238f0daede728d1156a22a87c2dd064837832a2d8e79ebe" gracePeriod=600
Dec 13 06:55:10 crc kubenswrapper[4644]: I1213 06:55:10.290785 4644 generic.go:334] "Generic (PLEG): container finished" podID="48240f19-087e-4597-b448-ab1a190a5027" containerID="8a334c8ac9cfbcecc238f0daede728d1156a22a87c2dd064837832a2d8e79ebe" exitCode=0
Dec 13 06:55:10 crc kubenswrapper[4644]: I1213 06:55:10.290856 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerDied","Data":"8a334c8ac9cfbcecc238f0daede728d1156a22a87c2dd064837832a2d8e79ebe"}
Dec 13 06:55:10 crc kubenswrapper[4644]: I1213 06:55:10.291502 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerStarted","Data":"2487f8fbc172ccc82773d3da9a7aed4fb2a0c9cb73ab10d78d14719b9fd79f00"}
Dec 13 06:55:10 crc kubenswrapper[4644]: I1213 06:55:10.291553 4644 scope.go:117] "RemoveContainer" containerID="940bbee9b20bdd362b830e1ec88ca33332cf28bd753c6bedbbb57aa771fa906d"
Dec 13 06:55:17 crc kubenswrapper[4644]: I1213 06:55:17.991711 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-nmcw8"]
Dec 13 06:55:17 crc kubenswrapper[4644]: E1213 06:55:17.992264 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79103034-e1a4-44b1-bffc-e9edc76da393" containerName="registry"
Dec 13 06:55:17 crc kubenswrapper[4644]: I1213 06:55:17.992277 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="79103034-e1a4-44b1-bffc-e9edc76da393" containerName="registry"
Dec 13 06:55:17 crc kubenswrapper[4644]: I1213 06:55:17.992369 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="79103034-e1a4-44b1-bffc-e9edc76da393" containerName="registry"
Dec 13 06:55:17 crc kubenswrapper[4644]: I1213 06:55:17.992823 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-nmcw8"
Dec 13 06:55:17 crc kubenswrapper[4644]: I1213 06:55:17.994700 4644 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-h564p"
Dec 13 06:55:17 crc kubenswrapper[4644]: I1213 06:55:17.997835 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Dec 13 06:55:17 crc kubenswrapper[4644]: I1213 06:55:17.997928 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Dec 13 06:55:17 crc kubenswrapper[4644]: I1213 06:55:17.998047 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-n9bvw"]
Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:17.998754 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-n9bvw"
Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:17.999728 4644 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-hl8bx"
Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.001649 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-nmcw8"]
Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.009133 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-wvbx4"]
Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.009830 4644 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-wvbx4" Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.012668 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-n9bvw"] Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.013483 4644 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-6lsc5" Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.035305 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-wvbx4"] Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.059913 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bwp2\" (UniqueName: \"kubernetes.io/projected/528a14ec-8d24-4cd8-8bbe-bbd9871a1891-kube-api-access-9bwp2\") pod \"cert-manager-5b446d88c5-n9bvw\" (UID: \"528a14ec-8d24-4cd8-8bbe-bbd9871a1891\") " pod="cert-manager/cert-manager-5b446d88c5-n9bvw" Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.059967 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7q8lq\" (UniqueName: \"kubernetes.io/projected/1e9683a7-097e-46d6-87b0-2a55804f684f-kube-api-access-7q8lq\") pod \"cert-manager-webhook-5655c58dd6-wvbx4\" (UID: \"1e9683a7-097e-46d6-87b0-2a55804f684f\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-wvbx4" Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.060011 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc4sj\" (UniqueName: \"kubernetes.io/projected/97814a84-2776-43f5-b7ce-9ac0cd79f716-kube-api-access-pc4sj\") pod \"cert-manager-cainjector-7f985d654d-nmcw8\" (UID: \"97814a84-2776-43f5-b7ce-9ac0cd79f716\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-nmcw8" Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.161290 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bwp2\" (UniqueName: \"kubernetes.io/projected/528a14ec-8d24-4cd8-8bbe-bbd9871a1891-kube-api-access-9bwp2\") pod \"cert-manager-5b446d88c5-n9bvw\" (UID: \"528a14ec-8d24-4cd8-8bbe-bbd9871a1891\") " pod="cert-manager/cert-manager-5b446d88c5-n9bvw" Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.161346 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7q8lq\" (UniqueName: \"kubernetes.io/projected/1e9683a7-097e-46d6-87b0-2a55804f684f-kube-api-access-7q8lq\") pod \"cert-manager-webhook-5655c58dd6-wvbx4\" (UID: \"1e9683a7-097e-46d6-87b0-2a55804f684f\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-wvbx4" Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.161388 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc4sj\" (UniqueName: \"kubernetes.io/projected/97814a84-2776-43f5-b7ce-9ac0cd79f716-kube-api-access-pc4sj\") pod \"cert-manager-cainjector-7f985d654d-nmcw8\" (UID: \"97814a84-2776-43f5-b7ce-9ac0cd79f716\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-nmcw8" Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.179473 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bwp2\" (UniqueName: \"kubernetes.io/projected/528a14ec-8d24-4cd8-8bbe-bbd9871a1891-kube-api-access-9bwp2\") pod \"cert-manager-5b446d88c5-n9bvw\" (UID: \"528a14ec-8d24-4cd8-8bbe-bbd9871a1891\") " 
pod="cert-manager/cert-manager-5b446d88c5-n9bvw" Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.179500 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc4sj\" (UniqueName: \"kubernetes.io/projected/97814a84-2776-43f5-b7ce-9ac0cd79f716-kube-api-access-pc4sj\") pod \"cert-manager-cainjector-7f985d654d-nmcw8\" (UID: \"97814a84-2776-43f5-b7ce-9ac0cd79f716\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-nmcw8" Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.179813 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7q8lq\" (UniqueName: \"kubernetes.io/projected/1e9683a7-097e-46d6-87b0-2a55804f684f-kube-api-access-7q8lq\") pod \"cert-manager-webhook-5655c58dd6-wvbx4\" (UID: \"1e9683a7-097e-46d6-87b0-2a55804f684f\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-wvbx4" Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.312666 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-nmcw8" Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.328563 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-n9bvw" Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.341425 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-wvbx4" Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.681666 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-nmcw8"] Dec 13 06:55:18 crc kubenswrapper[4644]: W1213 06:55:18.687596 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97814a84_2776_43f5_b7ce_9ac0cd79f716.slice/crio-8bb99da4d475d017449d7c9a596fd538dc45d3648c4509fe7fd039913a801a8a WatchSource:0}: Error finding container 8bb99da4d475d017449d7c9a596fd538dc45d3648c4509fe7fd039913a801a8a: Status 404 returned error can't find the container with id 8bb99da4d475d017449d7c9a596fd538dc45d3648c4509fe7fd039913a801a8a Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.689941 4644 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.724188 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-wvbx4"] Dec 13 06:55:18 crc kubenswrapper[4644]: I1213 06:55:18.727065 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-n9bvw"] Dec 13 06:55:18 crc kubenswrapper[4644]: W1213 06:55:18.728771 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e9683a7_097e_46d6_87b0_2a55804f684f.slice/crio-bc185093b44503b06fbbb08018eed09259acb48acc31e44f85cbb4003b1f69d6 WatchSource:0}: Error finding container bc185093b44503b06fbbb08018eed09259acb48acc31e44f85cbb4003b1f69d6: Status 404 returned error can't find the container with id bc185093b44503b06fbbb08018eed09259acb48acc31e44f85cbb4003b1f69d6 Dec 13 06:55:19 crc kubenswrapper[4644]: I1213 06:55:19.332753 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-nmcw8" 
event={"ID":"97814a84-2776-43f5-b7ce-9ac0cd79f716","Type":"ContainerStarted","Data":"8bb99da4d475d017449d7c9a596fd538dc45d3648c4509fe7fd039913a801a8a"} Dec 13 06:55:19 crc kubenswrapper[4644]: I1213 06:55:19.333597 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-wvbx4" event={"ID":"1e9683a7-097e-46d6-87b0-2a55804f684f","Type":"ContainerStarted","Data":"bc185093b44503b06fbbb08018eed09259acb48acc31e44f85cbb4003b1f69d6"} Dec 13 06:55:19 crc kubenswrapper[4644]: I1213 06:55:19.334750 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-n9bvw" event={"ID":"528a14ec-8d24-4cd8-8bbe-bbd9871a1891","Type":"ContainerStarted","Data":"874566634bc10dac08480587893f4853464a8bedad976e7daea2104b49e22c45"} Dec 13 06:55:22 crc kubenswrapper[4644]: I1213 06:55:22.351354 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-wvbx4" event={"ID":"1e9683a7-097e-46d6-87b0-2a55804f684f","Type":"ContainerStarted","Data":"a386d0c8666227f93485dca9c5385e5c858c5ab7790114ff474faf0281cb3025"} Dec 13 06:55:22 crc kubenswrapper[4644]: I1213 06:55:22.351746 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-wvbx4" Dec 13 06:55:22 crc kubenswrapper[4644]: I1213 06:55:22.353889 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-n9bvw" event={"ID":"528a14ec-8d24-4cd8-8bbe-bbd9871a1891","Type":"ContainerStarted","Data":"a10070fcfa5d61415c2d5450e52e260c5b3cd5e52e085b1c70a2bba149025623"} Dec 13 06:55:22 crc kubenswrapper[4644]: I1213 06:55:22.355572 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-nmcw8" event={"ID":"97814a84-2776-43f5-b7ce-9ac0cd79f716","Type":"ContainerStarted","Data":"5b731dc86d3975821eaa33b5f12a187aaeb5ff465413396be45fc56bde4fc23f"} Dec 13 06:55:22 crc kubenswrapper[4644]: I1213 06:55:22.368257 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-wvbx4" podStartSLOduration=2.701470299 podStartE2EDuration="5.368240291s" podCreationTimestamp="2025-12-13 06:55:17 +0000 UTC" firstStartedPulling="2025-12-13 06:55:18.731743466 +0000 UTC m=+580.946694299" lastFinishedPulling="2025-12-13 06:55:21.398513458 +0000 UTC m=+583.613464291" observedRunningTime="2025-12-13 06:55:22.365621518 +0000 UTC m=+584.580572351" watchObservedRunningTime="2025-12-13 06:55:22.368240291 +0000 UTC m=+584.583191124" Dec 13 06:55:22 crc kubenswrapper[4644]: I1213 06:55:22.377820 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-nmcw8" podStartSLOduration=3.461260379 podStartE2EDuration="5.377803177s" podCreationTimestamp="2025-12-13 06:55:17 +0000 UTC" firstStartedPulling="2025-12-13 06:55:18.689695152 +0000 UTC m=+580.904645985" lastFinishedPulling="2025-12-13 06:55:20.60623795 +0000 UTC m=+582.821188783" observedRunningTime="2025-12-13 06:55:22.377358191 +0000 UTC m=+584.592309023" watchObservedRunningTime="2025-12-13 06:55:22.377803177 +0000 UTC m=+584.592754010" Dec 13 06:55:22 crc kubenswrapper[4644]: I1213 06:55:22.390969 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-n9bvw" podStartSLOduration=2.760367253 podStartE2EDuration="5.390953648s" podCreationTimestamp="2025-12-13 06:55:17 +0000 UTC" 
firstStartedPulling="2025-12-13 06:55:18.732828976 +0000 UTC m=+580.947779809" lastFinishedPulling="2025-12-13 06:55:21.363415371 +0000 UTC m=+583.578366204" observedRunningTime="2025-12-13 06:55:22.388808837 +0000 UTC m=+584.603759669" watchObservedRunningTime="2025-12-13 06:55:22.390953648 +0000 UTC m=+584.605904481" Dec 13 06:55:28 crc kubenswrapper[4644]: I1213 06:55:28.345012 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-wvbx4" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.490104 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-bj6c2"] Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.490421 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovn-controller" containerID="cri-o://905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a" gracePeriod=30 Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.490498 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="northd" containerID="cri-o://77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb" gracePeriod=30 Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.490605 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905" gracePeriod=30 Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.490612 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="nbdb" containerID="cri-o://692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec" gracePeriod=30 Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.490606 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="sbdb" containerID="cri-o://56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437" gracePeriod=30 Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.490639 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovn-acl-logging" containerID="cri-o://43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f" gracePeriod=30 Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.490572 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="kube-rbac-proxy-node" containerID="cri-o://5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203" gracePeriod=30 Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.517514 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovnkube-controller" 
containerID="cri-o://8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b" gracePeriod=30 Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.760384 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovnkube-controller/3.log" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.763187 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovn-acl-logging/0.log" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.763746 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovn-controller/0.log" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.764313 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.804874 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-knhp9"] Dec 13 06:55:29 crc kubenswrapper[4644]: E1213 06:55:29.805061 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="kube-rbac-proxy-node" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805074 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="kube-rbac-proxy-node" Dec 13 06:55:29 crc kubenswrapper[4644]: E1213 06:55:29.805082 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="nbdb" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805088 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="nbdb" Dec 13 06:55:29 crc kubenswrapper[4644]: E1213 06:55:29.805095 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovnkube-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805101 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovnkube-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: E1213 06:55:29.805107 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovn-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805113 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovn-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: E1213 06:55:29.805120 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovnkube-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805125 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovnkube-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: E1213 06:55:29.805132 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="kube-rbac-proxy-ovn-metrics" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805138 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" 
containerName="kube-rbac-proxy-ovn-metrics" Dec 13 06:55:29 crc kubenswrapper[4644]: E1213 06:55:29.805145 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovnkube-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805150 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovnkube-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: E1213 06:55:29.805160 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="sbdb" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805166 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="sbdb" Dec 13 06:55:29 crc kubenswrapper[4644]: E1213 06:55:29.805176 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovnkube-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805181 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovnkube-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: E1213 06:55:29.805189 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="kubecfg-setup" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805194 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="kubecfg-setup" Dec 13 06:55:29 crc kubenswrapper[4644]: E1213 06:55:29.805201 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="northd" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805206 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="northd" Dec 13 06:55:29 crc kubenswrapper[4644]: E1213 06:55:29.805214 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovn-acl-logging" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805219 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovn-acl-logging" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805296 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovnkube-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805304 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="nbdb" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805311 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovn-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805319 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="sbdb" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805326 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovnkube-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805333 4644 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="kube-rbac-proxy-node" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805340 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovn-acl-logging" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805349 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="kube-rbac-proxy-ovn-metrics" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805357 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovnkube-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805362 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="northd" Dec 13 06:55:29 crc kubenswrapper[4644]: E1213 06:55:29.805437 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovnkube-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805461 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovnkube-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805555 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovnkube-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.805565 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerName="ovnkube-controller" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.806851 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822250 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-openvswitch\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822297 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zlbwf\" (UniqueName: \"kubernetes.io/projected/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-kube-api-access-zlbwf\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822335 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-ovn\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822364 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-log-socket\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822399 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-kubelet\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822413 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-run-ovn-kubernetes\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822429 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-cni-bin\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822504 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-systemd\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822536 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovnkube-config\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822553 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-etc-openvswitch\") pod 
\"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822576 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-systemd-units\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822594 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovnkube-script-lib\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822608 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-var-lib-openvswitch\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822627 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822655 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovn-node-metrics-cert\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822696 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-env-overrides\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822714 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-run-netns\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822726 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-cni-netd\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822753 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-node-log\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822778 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-slash\") pod \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\" (UID: \"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d\") " Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822868 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822906 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822903 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822929 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822937 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.822976 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823101 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823179 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823194 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823181 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-node-log" (OuterVolumeSpecName: "node-log") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823207 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823226 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823236 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-slash" (OuterVolumeSpecName: "host-slash") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823346 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-kubelet\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823370 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-log-socket\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823395 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-node-log\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823411 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-run-ovn\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823429 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0d974a07-0ea6-489a-8e49-eb7112d853d0-ovnkube-config\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823428 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823507 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-log-socket" (OuterVolumeSpecName: "log-socket") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823529 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823545 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-cni-netd\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823597 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823601 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-slash\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823649 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-run-netns\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823696 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0d974a07-0ea6-489a-8e49-eb7112d853d0-ovnkube-script-lib\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823723 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-etc-openvswitch\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823770 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-cni-bin\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823794 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0d974a07-0ea6-489a-8e49-eb7112d853d0-env-overrides\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823811 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823832 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-run-systemd\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823849 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-systemd-units\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823903 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s94pn\" (UniqueName: \"kubernetes.io/projected/0d974a07-0ea6-489a-8e49-eb7112d853d0-kube-api-access-s94pn\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823930 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-run-ovn-kubernetes\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823946 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-var-lib-openvswitch\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.823980 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0d974a07-0ea6-489a-8e49-eb7112d853d0-ovn-node-metrics-cert\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824002 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-run-openvswitch\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824064 4644 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-log-socket\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824083 4644 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824094 4644 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824102 4644 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824114 4644 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824124 4644 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824134 4644 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824142 4644 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824150 4644 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824158 4644 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824168 4644 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824177 4644 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824185 4644 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824193 4644 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-node-log\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824200 4644 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-host-slash\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824209 4644 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.824218 4644 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.828041 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-kube-api-access-zlbwf" (OuterVolumeSpecName: "kube-api-access-zlbwf") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "kube-api-access-zlbwf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.828192 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.835471 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" (UID: "03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.925982 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-etc-openvswitch\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926056 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0d974a07-0ea6-489a-8e49-eb7112d853d0-env-overrides\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926083 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-cni-bin\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926108 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926132 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-run-systemd\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926131 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-etc-openvswitch\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926208 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-systemd-units\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926209 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-cni-bin\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926256 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926159 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-systemd-units\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926269 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-run-systemd\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926311 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s94pn\" (UniqueName: \"kubernetes.io/projected/0d974a07-0ea6-489a-8e49-eb7112d853d0-kube-api-access-s94pn\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926352 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-run-ovn-kubernetes\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926374 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-var-lib-openvswitch\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926413 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-run-ovn-kubernetes\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926418 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0d974a07-0ea6-489a-8e49-eb7112d853d0-ovn-node-metrics-cert\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926493 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-run-openvswitch\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926557 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-kubelet\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 
13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926576 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-log-socket\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926604 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-node-log\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926618 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0d974a07-0ea6-489a-8e49-eb7112d853d0-env-overrides\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926641 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-run-ovn\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926625 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-run-ovn\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926664 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-run-openvswitch\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926666 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0d974a07-0ea6-489a-8e49-eb7112d853d0-ovnkube-config\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926734 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-cni-netd\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926794 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-slash\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926815 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-run-netns\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926847 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0d974a07-0ea6-489a-8e49-eb7112d853d0-ovnkube-script-lib\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926871 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-var-lib-openvswitch\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926914 4644 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926932 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zlbwf\" (UniqueName: \"kubernetes.io/projected/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-kube-api-access-zlbwf\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926945 4644 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926946 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-slash\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926974 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-node-log\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.926998 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-run-netns\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.927026 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-kubelet\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.927050 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-log-socket\") pod 
\"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.927087 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0d974a07-0ea6-489a-8e49-eb7112d853d0-ovnkube-config\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.927094 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0d974a07-0ea6-489a-8e49-eb7112d853d0-host-cni-netd\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.927379 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0d974a07-0ea6-489a-8e49-eb7112d853d0-ovnkube-script-lib\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.929570 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0d974a07-0ea6-489a-8e49-eb7112d853d0-ovn-node-metrics-cert\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:29 crc kubenswrapper[4644]: I1213 06:55:29.941335 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s94pn\" (UniqueName: \"kubernetes.io/projected/0d974a07-0ea6-489a-8e49-eb7112d853d0-kube-api-access-s94pn\") pod \"ovnkube-node-knhp9\" (UID: \"0d974a07-0ea6-489a-8e49-eb7112d853d0\") " pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.120666 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" Dec 13 06:55:30 crc kubenswrapper[4644]: W1213 06:55:30.137989 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d974a07_0ea6_489a_8e49_eb7112d853d0.slice/crio-e04cca13fd5304f4428b65219c7077adc5b3d54a707c65374532bd24249a1d24 WatchSource:0}: Error finding container e04cca13fd5304f4428b65219c7077adc5b3d54a707c65374532bd24249a1d24: Status 404 returned error can't find the container with id e04cca13fd5304f4428b65219c7077adc5b3d54a707c65374532bd24249a1d24 Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.394251 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lbk25_9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd/kube-multus/2.log" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.394953 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lbk25_9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd/kube-multus/1.log" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.395000 4644 generic.go:334] "Generic (PLEG): container finished" podID="9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd" containerID="9d22616d01fe1e38e58ad0bf123ce6e9fe9fbedeccfd84b0cef0254f22ff59a8" exitCode=2 Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.395814 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lbk25" event={"ID":"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd","Type":"ContainerDied","Data":"9d22616d01fe1e38e58ad0bf123ce6e9fe9fbedeccfd84b0cef0254f22ff59a8"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.395872 4644 scope.go:117] "RemoveContainer" containerID="68f72e73eb6fea5f57ed95c448c349e23ad92277af62f67c45217ef0c03e07e1" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.396221 4644 scope.go:117] "RemoveContainer" containerID="9d22616d01fe1e38e58ad0bf123ce6e9fe9fbedeccfd84b0cef0254f22ff59a8" Dec 13 06:55:30 crc kubenswrapper[4644]: E1213 06:55:30.396397 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-lbk25_openshift-multus(9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd)\"" pod="openshift-multus/multus-lbk25" podUID="9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.397755 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovnkube-controller/3.log" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.400298 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovn-acl-logging/0.log" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.400762 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-bj6c2_03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/ovn-controller/0.log" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401156 4644 generic.go:334] "Generic (PLEG): container finished" podID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerID="8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b" exitCode=0 Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401181 4644 generic.go:334] "Generic (PLEG): container finished" podID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" 
containerID="56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437" exitCode=0 Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401191 4644 generic.go:334] "Generic (PLEG): container finished" podID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerID="692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec" exitCode=0 Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401199 4644 generic.go:334] "Generic (PLEG): container finished" podID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerID="77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb" exitCode=0 Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401209 4644 generic.go:334] "Generic (PLEG): container finished" podID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerID="a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905" exitCode=0 Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401216 4644 generic.go:334] "Generic (PLEG): container finished" podID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerID="5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203" exitCode=0 Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401222 4644 generic.go:334] "Generic (PLEG): container finished" podID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerID="43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f" exitCode=143 Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401228 4644 generic.go:334] "Generic (PLEG): container finished" podID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" containerID="905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a" exitCode=143 Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401272 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerDied","Data":"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401303 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerDied","Data":"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401315 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerDied","Data":"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401325 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerDied","Data":"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401333 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerDied","Data":"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401343 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerDied","Data":"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203"} Dec 13 06:55:30 crc 
kubenswrapper[4644]: I1213 06:55:30.401354 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401366 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401373 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401378 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401383 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401388 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401392 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401398 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401403 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401408 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401416 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerDied","Data":"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401424 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401430 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401436 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437"} Dec 13 06:55:30 crc 
kubenswrapper[4644]: I1213 06:55:30.401460 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401466 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401473 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401479 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401485 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401492 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401498 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401505 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerDied","Data":"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401515 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401521 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401526 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401531 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401536 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401540 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905"} Dec 13 06:55:30 crc 
kubenswrapper[4644]: I1213 06:55:30.401545 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401550 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401554 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401559 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401566 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" event={"ID":"03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d","Type":"ContainerDied","Data":"82d18638f84e8a5c89b2416b7b117d7bb0fa410cf86bd295adb83354d34550c0"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401572 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401579 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401584 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401590 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401595 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401601 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401605 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401610 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401614 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a"} Dec 13 06:55:30 crc 
kubenswrapper[4644]: I1213 06:55:30.401619 4644 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.401582 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-bj6c2" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.403023 4644 generic.go:334] "Generic (PLEG): container finished" podID="0d974a07-0ea6-489a-8e49-eb7112d853d0" containerID="d474b3b9ec1de93f02e0fe93700c90b1e0a368b9b2fc23eb78156b1f2e41df5f" exitCode=0 Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.403057 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" event={"ID":"0d974a07-0ea6-489a-8e49-eb7112d853d0","Type":"ContainerDied","Data":"d474b3b9ec1de93f02e0fe93700c90b1e0a368b9b2fc23eb78156b1f2e41df5f"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.403078 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" event={"ID":"0d974a07-0ea6-489a-8e49-eb7112d853d0","Type":"ContainerStarted","Data":"e04cca13fd5304f4428b65219c7077adc5b3d54a707c65374532bd24249a1d24"} Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.424058 4644 scope.go:117] "RemoveContainer" containerID="8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.440759 4644 scope.go:117] "RemoveContainer" containerID="ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.457420 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-bj6c2"] Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.461027 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-bj6c2"] Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.468604 4644 scope.go:117] "RemoveContainer" containerID="56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.487249 4644 scope.go:117] "RemoveContainer" containerID="692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.500091 4644 scope.go:117] "RemoveContainer" containerID="77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.514635 4644 scope.go:117] "RemoveContainer" containerID="a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.537632 4644 scope.go:117] "RemoveContainer" containerID="5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.550525 4644 scope.go:117] "RemoveContainer" containerID="43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.565882 4644 scope.go:117] "RemoveContainer" containerID="905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.577774 4644 scope.go:117] "RemoveContainer" containerID="64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.590307 4644 scope.go:117] "RemoveContainer" 
containerID="8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b" Dec 13 06:55:30 crc kubenswrapper[4644]: E1213 06:55:30.590742 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b\": container with ID starting with 8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b not found: ID does not exist" containerID="8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.590781 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b"} err="failed to get container status \"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b\": rpc error: code = NotFound desc = could not find container \"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b\": container with ID starting with 8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.590806 4644 scope.go:117] "RemoveContainer" containerID="ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012" Dec 13 06:55:30 crc kubenswrapper[4644]: E1213 06:55:30.591156 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012\": container with ID starting with ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012 not found: ID does not exist" containerID="ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.591176 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012"} err="failed to get container status \"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012\": rpc error: code = NotFound desc = could not find container \"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012\": container with ID starting with ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012 not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.591212 4644 scope.go:117] "RemoveContainer" containerID="56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437" Dec 13 06:55:30 crc kubenswrapper[4644]: E1213 06:55:30.591481 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\": container with ID starting with 56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437 not found: ID does not exist" containerID="56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.591518 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437"} err="failed to get container status \"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\": rpc error: code = NotFound desc = could not find container \"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\": container with ID starting with 
56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437 not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.591544 4644 scope.go:117] "RemoveContainer" containerID="692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec" Dec 13 06:55:30 crc kubenswrapper[4644]: E1213 06:55:30.591784 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\": container with ID starting with 692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec not found: ID does not exist" containerID="692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.591836 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec"} err="failed to get container status \"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\": rpc error: code = NotFound desc = could not find container \"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\": container with ID starting with 692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.591851 4644 scope.go:117] "RemoveContainer" containerID="77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb" Dec 13 06:55:30 crc kubenswrapper[4644]: E1213 06:55:30.592069 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\": container with ID starting with 77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb not found: ID does not exist" containerID="77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.592098 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb"} err="failed to get container status \"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\": rpc error: code = NotFound desc = could not find container \"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\": container with ID starting with 77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.592115 4644 scope.go:117] "RemoveContainer" containerID="a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905" Dec 13 06:55:30 crc kubenswrapper[4644]: E1213 06:55:30.592457 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\": container with ID starting with a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905 not found: ID does not exist" containerID="a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.592486 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905"} err="failed to get container status \"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\": rpc 
error: code = NotFound desc = could not find container \"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\": container with ID starting with a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905 not found: ID does not exist"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.592503 4644 scope.go:117] "RemoveContainer" containerID="5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203"
Dec 13 06:55:30 crc kubenswrapper[4644]: E1213 06:55:30.592735 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\": container with ID starting with 5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203 not found: ID does not exist" containerID="5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.592765 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203"} err="failed to get container status \"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\": rpc error: code = NotFound desc = could not find container \"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\": container with ID starting with 5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203 not found: ID does not exist"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.592780 4644 scope.go:117] "RemoveContainer" containerID="43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f"
Dec 13 06:55:30 crc kubenswrapper[4644]: E1213 06:55:30.593035 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\": container with ID starting with 43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f not found: ID does not exist" containerID="43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.593056 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f"} err="failed to get container status \"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\": rpc error: code = NotFound desc = could not find container \"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\": container with ID starting with 43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f not found: ID does not exist"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.593071 4644 scope.go:117] "RemoveContainer" containerID="905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a"
Dec 13 06:55:30 crc kubenswrapper[4644]: E1213 06:55:30.593281 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\": container with ID starting with 905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a not found: ID does not exist" containerID="905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.593303 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a"} err="failed to get container status \"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\": rpc error: code = NotFound desc = could not find container \"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\": container with ID starting with 905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a not found: ID does not exist"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.593318 4644 scope.go:117] "RemoveContainer" containerID="64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7"
Dec 13 06:55:30 crc kubenswrapper[4644]: E1213 06:55:30.593544 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\": container with ID starting with 64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7 not found: ID does not exist" containerID="64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.593565 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7"} err="failed to get container status \"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\": rpc error: code = NotFound desc = could not find container \"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\": container with ID starting with 64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7 not found: ID does not exist"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.593578 4644 scope.go:117] "RemoveContainer" containerID="8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.593779 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b"} err="failed to get container status \"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b\": rpc error: code = NotFound desc = could not find container \"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b\": container with ID starting with 8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b not found: ID does not exist"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.593797 4644 scope.go:117] "RemoveContainer" containerID="ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.594029 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012"} err="failed to get container status \"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012\": rpc error: code = NotFound desc = could not find container \"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012\": container with ID starting with ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012 not found: ID does not exist"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.594053 4644 scope.go:117] "RemoveContainer" containerID="56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.594276 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437"} err="failed to get container status \"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\": rpc error: code = NotFound desc = could not find container \"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\": container with ID starting with 56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437 not found: ID does not exist"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.594296 4644 scope.go:117] "RemoveContainer" containerID="692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.594682 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec"} err="failed to get container status \"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\": rpc error: code = NotFound desc = could not find container \"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\": container with ID starting with 692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec not found: ID does not exist"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.594703 4644 scope.go:117] "RemoveContainer" containerID="77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.594917 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb"} err="failed to get container status \"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\": rpc error: code = NotFound desc = could not find container \"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\": container with ID starting with 77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb not found: ID does not exist"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.594936 4644 scope.go:117] "RemoveContainer" containerID="a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.595236 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905"} err="failed to get container status \"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\": rpc error: code = NotFound desc = could not find container \"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\": container with ID starting with a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905 not found: ID does not exist"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.595259 4644 scope.go:117] "RemoveContainer" containerID="5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.595530 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203"} err="failed to get container status \"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\": rpc error: code = NotFound desc = could not find container \"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\": container with ID starting with 5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203 not found: ID does not exist"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.595547 4644 scope.go:117] "RemoveContainer" containerID="43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.595778 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f"} err="failed to get container status \"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\": rpc error: code = NotFound desc = could not find container \"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\": container with ID starting with 43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f not found: ID does not exist"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.595796 4644 scope.go:117] "RemoveContainer" containerID="905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.596032 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a"} err="failed to get container status \"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\": rpc error: code = NotFound desc = could not find container \"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\": container with ID starting with 905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a not found: ID does not exist"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.596051 4644 scope.go:117] "RemoveContainer" containerID="64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.596304 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7"} err="failed to get container status \"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\": rpc error: code = NotFound desc = could not find container \"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\": container with ID starting with 64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7 not found: ID does not exist"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.596324 4644 scope.go:117] "RemoveContainer" containerID="8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.596540 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b"} err="failed to get container status \"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b\": rpc error: code = NotFound desc = could not find container \"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b\": container with ID starting with 8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b not found: ID does not exist"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.596563 4644 scope.go:117] "RemoveContainer" containerID="ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012"
Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.596816 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012"} err="failed to get container status \"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012\": rpc error: code = NotFound desc = could not find container \"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012\": container with ID starting with ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012 not found: ID does not exist"
\"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012\": rpc error: code = NotFound desc = could not find container \"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012\": container with ID starting with ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012 not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.596835 4644 scope.go:117] "RemoveContainer" containerID="56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.597044 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437"} err="failed to get container status \"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\": rpc error: code = NotFound desc = could not find container \"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\": container with ID starting with 56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437 not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.597061 4644 scope.go:117] "RemoveContainer" containerID="692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.597276 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec"} err="failed to get container status \"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\": rpc error: code = NotFound desc = could not find container \"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\": container with ID starting with 692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.597293 4644 scope.go:117] "RemoveContainer" containerID="77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.598782 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb"} err="failed to get container status \"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\": rpc error: code = NotFound desc = could not find container \"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\": container with ID starting with 77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.598801 4644 scope.go:117] "RemoveContainer" containerID="a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.599168 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905"} err="failed to get container status \"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\": rpc error: code = NotFound desc = could not find container \"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\": container with ID starting with a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905 not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.599191 4644 scope.go:117] "RemoveContainer" 
containerID="5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.599414 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203"} err="failed to get container status \"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\": rpc error: code = NotFound desc = could not find container \"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\": container with ID starting with 5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203 not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.599433 4644 scope.go:117] "RemoveContainer" containerID="43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.599662 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f"} err="failed to get container status \"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\": rpc error: code = NotFound desc = could not find container \"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\": container with ID starting with 43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.599706 4644 scope.go:117] "RemoveContainer" containerID="905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.599968 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a"} err="failed to get container status \"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\": rpc error: code = NotFound desc = could not find container \"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\": container with ID starting with 905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.599990 4644 scope.go:117] "RemoveContainer" containerID="64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.600207 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7"} err="failed to get container status \"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\": rpc error: code = NotFound desc = could not find container \"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\": container with ID starting with 64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7 not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.600229 4644 scope.go:117] "RemoveContainer" containerID="8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.600461 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b"} err="failed to get container status \"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b\": rpc error: code = NotFound desc = could not find 
container \"8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b\": container with ID starting with 8a7dc068175ca0ff7380025aa5961f0b81dae14310e31a3b74c80acbab856a1b not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.600479 4644 scope.go:117] "RemoveContainer" containerID="ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.600787 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012"} err="failed to get container status \"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012\": rpc error: code = NotFound desc = could not find container \"ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012\": container with ID starting with ff7d476b701faea9924351c3590853cdf0ac5104547824a9479fc90b735ec012 not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.600810 4644 scope.go:117] "RemoveContainer" containerID="56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.601143 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437"} err="failed to get container status \"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\": rpc error: code = NotFound desc = could not find container \"56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437\": container with ID starting with 56b48c88277bb238e4986599f4a3e151ada2b5ad1f0807e16c0ba5cb06373437 not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.601162 4644 scope.go:117] "RemoveContainer" containerID="692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.601411 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec"} err="failed to get container status \"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\": rpc error: code = NotFound desc = could not find container \"692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec\": container with ID starting with 692726fc2597010893783ea8c6051de5ff7e966a7185e49f984312b1850f2eec not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.601429 4644 scope.go:117] "RemoveContainer" containerID="77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.601792 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb"} err="failed to get container status \"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\": rpc error: code = NotFound desc = could not find container \"77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb\": container with ID starting with 77f0e88f13144c15380f32058a1f9408d40d0129f84a17ca71b875e294bfebdb not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.601818 4644 scope.go:117] "RemoveContainer" containerID="a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.602085 4644 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905"} err="failed to get container status \"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\": rpc error: code = NotFound desc = could not find container \"a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905\": container with ID starting with a9f6eca97caa7c4137489ab75896f0cf734abb39ce8251cdec93d82edd3fc905 not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.602108 4644 scope.go:117] "RemoveContainer" containerID="5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.602345 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203"} err="failed to get container status \"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\": rpc error: code = NotFound desc = could not find container \"5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203\": container with ID starting with 5e25cbe736048011c1262a27f5e380444b68743e3069f1fd8484ea9597fe5203 not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.602365 4644 scope.go:117] "RemoveContainer" containerID="43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.602607 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f"} err="failed to get container status \"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\": rpc error: code = NotFound desc = could not find container \"43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f\": container with ID starting with 43521d22c0953690b5dce005acdb79517139872b74aed0c8463a88497ff5496f not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.602629 4644 scope.go:117] "RemoveContainer" containerID="905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.602838 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a"} err="failed to get container status \"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\": rpc error: code = NotFound desc = could not find container \"905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a\": container with ID starting with 905e097c0f32b9df58ed75c2c59e5d8e76fb841eab2ada2947c305f5e946265a not found: ID does not exist" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.602859 4644 scope.go:117] "RemoveContainer" containerID="64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7" Dec 13 06:55:30 crc kubenswrapper[4644]: I1213 06:55:30.603326 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7"} err="failed to get container status \"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\": rpc error: code = NotFound desc = could not find container \"64180c0f0200d1b93d03a7cf55f1a4f6c2e828d7893df099b5618f8d091ffdd7\": container with ID starting with 
Dec 13 06:55:31 crc kubenswrapper[4644]: I1213 06:55:31.412009 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" event={"ID":"0d974a07-0ea6-489a-8e49-eb7112d853d0","Type":"ContainerStarted","Data":"277da4c89b8f661350b79ffe98ddf98ccec92e8f7841999825fd8ec009893a35"}
Dec 13 06:55:31 crc kubenswrapper[4644]: I1213 06:55:31.412319 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" event={"ID":"0d974a07-0ea6-489a-8e49-eb7112d853d0","Type":"ContainerStarted","Data":"8092737a9b16f558c67ce40cfc4446c80594932a586c0d4fe592ee73c0ee3a75"}
Dec 13 06:55:31 crc kubenswrapper[4644]: I1213 06:55:31.412332 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" event={"ID":"0d974a07-0ea6-489a-8e49-eb7112d853d0","Type":"ContainerStarted","Data":"700a08a46b77710bc0c8522abaab358dff66cecc0a24e42deec62268ccab7d2f"}
Dec 13 06:55:31 crc kubenswrapper[4644]: I1213 06:55:31.412341 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" event={"ID":"0d974a07-0ea6-489a-8e49-eb7112d853d0","Type":"ContainerStarted","Data":"a99b12e74d948c454548bf067be19c3563ac63979cd4eb07039c5b9ad12777a6"}
Dec 13 06:55:31 crc kubenswrapper[4644]: I1213 06:55:31.412350 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" event={"ID":"0d974a07-0ea6-489a-8e49-eb7112d853d0","Type":"ContainerStarted","Data":"714cd7a4fd4fd0447b7d6fd5bb07e86e20bebcccc508a6d4aa4d66d606d0ae4a"}
Dec 13 06:55:31 crc kubenswrapper[4644]: I1213 06:55:31.412358 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" event={"ID":"0d974a07-0ea6-489a-8e49-eb7112d853d0","Type":"ContainerStarted","Data":"03bf91026c8cd52173f2d1e614308baa1ee33240c3d2eaf0de448a5e61680ba0"}
Dec 13 06:55:31 crc kubenswrapper[4644]: I1213 06:55:31.413849 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lbk25_9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd/kube-multus/2.log"
Dec 13 06:55:32 crc kubenswrapper[4644]: I1213 06:55:32.395009 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d" path="/var/lib/kubelet/pods/03c8ecd2-640b-4ecf-88a6-0c28ea7fd21d/volumes"
Dec 13 06:55:33 crc kubenswrapper[4644]: I1213 06:55:33.426267 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" event={"ID":"0d974a07-0ea6-489a-8e49-eb7112d853d0","Type":"ContainerStarted","Data":"963cc9e0bee9adea8b50ab92923a7d4636ecd12b2804b1fa62f4db2665bc6e5c"}
Dec 13 06:55:35 crc kubenswrapper[4644]: I1213 06:55:35.439867 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" event={"ID":"0d974a07-0ea6-489a-8e49-eb7112d853d0","Type":"ContainerStarted","Data":"29bf42a8a47bf2037393fa038ba4052b2d0a975b30776602ff78928b11f256b0"}
Dec 13 06:55:35 crc kubenswrapper[4644]: I1213 06:55:35.441436 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9"
Dec 13 06:55:35 crc kubenswrapper[4644]: I1213 06:55:35.441474 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9"
Dec 13 06:55:35 crc kubenswrapper[4644]: I1213 06:55:35.441483 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9"
Dec 13 06:55:35 crc kubenswrapper[4644]: I1213 06:55:35.468807 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9" podStartSLOduration=6.468792446 podStartE2EDuration="6.468792446s" podCreationTimestamp="2025-12-13 06:55:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:55:35.466550213 +0000 UTC m=+597.681501047" watchObservedRunningTime="2025-12-13 06:55:35.468792446 +0000 UTC m=+597.683743278"
Dec 13 06:55:35 crc kubenswrapper[4644]: I1213 06:55:35.473606 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9"
Dec 13 06:55:35 crc kubenswrapper[4644]: I1213 06:55:35.478954 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9"
Dec 13 06:55:42 crc kubenswrapper[4644]: I1213 06:55:42.390166 4644 scope.go:117] "RemoveContainer" containerID="9d22616d01fe1e38e58ad0bf123ce6e9fe9fbedeccfd84b0cef0254f22ff59a8"
Dec 13 06:55:42 crc kubenswrapper[4644]: E1213 06:55:42.390734 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-lbk25_openshift-multus(9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd)\"" pod="openshift-multus/multus-lbk25" podUID="9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd"
Dec 13 06:55:55 crc kubenswrapper[4644]: I1213 06:55:55.389010 4644 scope.go:117] "RemoveContainer" containerID="9d22616d01fe1e38e58ad0bf123ce6e9fe9fbedeccfd84b0cef0254f22ff59a8"
Dec 13 06:55:55 crc kubenswrapper[4644]: I1213 06:55:55.514525 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lbk25_9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd/kube-multus/2.log"
Dec 13 06:55:55 crc kubenswrapper[4644]: I1213 06:55:55.514580 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lbk25" event={"ID":"9758dfeb-e01d-4dc8-96a9-5b4ec6746bdd","Type":"ContainerStarted","Data":"f9f9913a631f79d99b5df2788659477a7b9080df8cff2222edcb29fbbad515aa"}
Dec 13 06:56:00 crc kubenswrapper[4644]: I1213 06:56:00.141032 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-knhp9"
Dec 13 06:56:02 crc kubenswrapper[4644]: I1213 06:56:02.308220 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw"]
Dec 13 06:56:02 crc kubenswrapper[4644]: I1213 06:56:02.309361 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw"
Dec 13 06:56:02 crc kubenswrapper[4644]: I1213 06:56:02.311099 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 13 06:56:02 crc kubenswrapper[4644]: I1213 06:56:02.317215 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw"]
Dec 13 06:56:02 crc kubenswrapper[4644]: I1213 06:56:02.401591 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ef428abc-94fc-46e3-9668-d4ba73a82bf0-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw\" (UID: \"ef428abc-94fc-46e3-9668-d4ba73a82bf0\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw"
Dec 13 06:56:02 crc kubenswrapper[4644]: I1213 06:56:02.401668 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9rzr\" (UniqueName: \"kubernetes.io/projected/ef428abc-94fc-46e3-9668-d4ba73a82bf0-kube-api-access-k9rzr\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw\" (UID: \"ef428abc-94fc-46e3-9668-d4ba73a82bf0\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw"
Dec 13 06:56:02 crc kubenswrapper[4644]: I1213 06:56:02.401842 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ef428abc-94fc-46e3-9668-d4ba73a82bf0-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw\" (UID: \"ef428abc-94fc-46e3-9668-d4ba73a82bf0\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw"
Dec 13 06:56:02 crc kubenswrapper[4644]: I1213 06:56:02.503387 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ef428abc-94fc-46e3-9668-d4ba73a82bf0-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw\" (UID: \"ef428abc-94fc-46e3-9668-d4ba73a82bf0\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw"
Dec 13 06:56:02 crc kubenswrapper[4644]: I1213 06:56:02.503465 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9rzr\" (UniqueName: \"kubernetes.io/projected/ef428abc-94fc-46e3-9668-d4ba73a82bf0-kube-api-access-k9rzr\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw\" (UID: \"ef428abc-94fc-46e3-9668-d4ba73a82bf0\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw"
Dec 13 06:56:02 crc kubenswrapper[4644]: I1213 06:56:02.503538 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ef428abc-94fc-46e3-9668-d4ba73a82bf0-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw\" (UID: \"ef428abc-94fc-46e3-9668-d4ba73a82bf0\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw"
Dec 13 06:56:02 crc kubenswrapper[4644]: I1213 06:56:02.503923 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ef428abc-94fc-46e3-9668-d4ba73a82bf0-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw\" (UID: \"ef428abc-94fc-46e3-9668-d4ba73a82bf0\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw"
Dec 13 06:56:02 crc kubenswrapper[4644]: I1213 06:56:02.503931 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ef428abc-94fc-46e3-9668-d4ba73a82bf0-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw\" (UID: \"ef428abc-94fc-46e3-9668-d4ba73a82bf0\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw"
Dec 13 06:56:02 crc kubenswrapper[4644]: I1213 06:56:02.520380 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9rzr\" (UniqueName: \"kubernetes.io/projected/ef428abc-94fc-46e3-9668-d4ba73a82bf0-kube-api-access-k9rzr\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw\" (UID: \"ef428abc-94fc-46e3-9668-d4ba73a82bf0\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw"
Dec 13 06:56:02 crc kubenswrapper[4644]: I1213 06:56:02.622748 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw"
Dec 13 06:56:02 crc kubenswrapper[4644]: I1213 06:56:02.774205 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw"]
Dec 13 06:56:03 crc kubenswrapper[4644]: I1213 06:56:03.551987 4644 generic.go:334] "Generic (PLEG): container finished" podID="ef428abc-94fc-46e3-9668-d4ba73a82bf0" containerID="194cb6d58cb68cdf9fc476f075b3ce5a40363a18a64dcf16d872e4e402039299" exitCode=0
Dec 13 06:56:03 crc kubenswrapper[4644]: I1213 06:56:03.552097 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw" event={"ID":"ef428abc-94fc-46e3-9668-d4ba73a82bf0","Type":"ContainerDied","Data":"194cb6d58cb68cdf9fc476f075b3ce5a40363a18a64dcf16d872e4e402039299"}
Dec 13 06:56:03 crc kubenswrapper[4644]: I1213 06:56:03.552727 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw" event={"ID":"ef428abc-94fc-46e3-9668-d4ba73a82bf0","Type":"ContainerStarted","Data":"27cc12d70412ae379d1866ba750546b4712179b116d5277402e1005d89faf899"}
Dec 13 06:56:05 crc kubenswrapper[4644]: I1213 06:56:05.564134 4644 generic.go:334] "Generic (PLEG): container finished" podID="ef428abc-94fc-46e3-9668-d4ba73a82bf0" containerID="c4f9e8c7177bdc7590451fd13e91296df026cf72e9e217eddacee2ccdf6b9621" exitCode=0
Dec 13 06:56:05 crc kubenswrapper[4644]: I1213 06:56:05.564238 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw" event={"ID":"ef428abc-94fc-46e3-9668-d4ba73a82bf0","Type":"ContainerDied","Data":"c4f9e8c7177bdc7590451fd13e91296df026cf72e9e217eddacee2ccdf6b9621"}
Dec 13 06:56:06 crc kubenswrapper[4644]: I1213 06:56:06.570243 4644 generic.go:334] "Generic (PLEG): container finished" podID="ef428abc-94fc-46e3-9668-d4ba73a82bf0" containerID="bdfda782df698f5f8544a1a4cf2c3915669e7b1ac2eb359134ad008ef289a3af" exitCode=0
Dec 13 06:56:06 crc kubenswrapper[4644]: I1213 06:56:06.570307 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw" event={"ID":"ef428abc-94fc-46e3-9668-d4ba73a82bf0","Type":"ContainerDied","Data":"bdfda782df698f5f8544a1a4cf2c3915669e7b1ac2eb359134ad008ef289a3af"}
Dec 13 06:56:07 crc kubenswrapper[4644]: I1213 06:56:07.760358 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw"
Dec 13 06:56:07 crc kubenswrapper[4644]: I1213 06:56:07.877362 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ef428abc-94fc-46e3-9668-d4ba73a82bf0-bundle\") pod \"ef428abc-94fc-46e3-9668-d4ba73a82bf0\" (UID: \"ef428abc-94fc-46e3-9668-d4ba73a82bf0\") "
Dec 13 06:56:07 crc kubenswrapper[4644]: I1213 06:56:07.877678 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9rzr\" (UniqueName: \"kubernetes.io/projected/ef428abc-94fc-46e3-9668-d4ba73a82bf0-kube-api-access-k9rzr\") pod \"ef428abc-94fc-46e3-9668-d4ba73a82bf0\" (UID: \"ef428abc-94fc-46e3-9668-d4ba73a82bf0\") "
Dec 13 06:56:07 crc kubenswrapper[4644]: I1213 06:56:07.877723 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ef428abc-94fc-46e3-9668-d4ba73a82bf0-util\") pod \"ef428abc-94fc-46e3-9668-d4ba73a82bf0\" (UID: \"ef428abc-94fc-46e3-9668-d4ba73a82bf0\") "
Dec 13 06:56:07 crc kubenswrapper[4644]: I1213 06:56:07.878289 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef428abc-94fc-46e3-9668-d4ba73a82bf0-bundle" (OuterVolumeSpecName: "bundle") pod "ef428abc-94fc-46e3-9668-d4ba73a82bf0" (UID: "ef428abc-94fc-46e3-9668-d4ba73a82bf0"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 13 06:56:07 crc kubenswrapper[4644]: I1213 06:56:07.883191 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef428abc-94fc-46e3-9668-d4ba73a82bf0-kube-api-access-k9rzr" (OuterVolumeSpecName: "kube-api-access-k9rzr") pod "ef428abc-94fc-46e3-9668-d4ba73a82bf0" (UID: "ef428abc-94fc-46e3-9668-d4ba73a82bf0"). InnerVolumeSpecName "kube-api-access-k9rzr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 13 06:56:07 crc kubenswrapper[4644]: I1213 06:56:07.887702 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef428abc-94fc-46e3-9668-d4ba73a82bf0-util" (OuterVolumeSpecName: "util") pod "ef428abc-94fc-46e3-9668-d4ba73a82bf0" (UID: "ef428abc-94fc-46e3-9668-d4ba73a82bf0"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 13 06:56:07 crc kubenswrapper[4644]: I1213 06:56:07.978882 4644 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ef428abc-94fc-46e3-9668-d4ba73a82bf0-bundle\") on node \"crc\" DevicePath \"\""
Dec 13 06:56:07 crc kubenswrapper[4644]: I1213 06:56:07.978924 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9rzr\" (UniqueName: \"kubernetes.io/projected/ef428abc-94fc-46e3-9668-d4ba73a82bf0-kube-api-access-k9rzr\") on node \"crc\" DevicePath \"\""
Dec 13 06:56:07 crc kubenswrapper[4644]: I1213 06:56:07.978936 4644 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ef428abc-94fc-46e3-9668-d4ba73a82bf0-util\") on node \"crc\" DevicePath \"\""
Dec 13 06:56:08 crc kubenswrapper[4644]: I1213 06:56:08.581836 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw" event={"ID":"ef428abc-94fc-46e3-9668-d4ba73a82bf0","Type":"ContainerDied","Data":"27cc12d70412ae379d1866ba750546b4712179b116d5277402e1005d89faf899"}
Dec 13 06:56:08 crc kubenswrapper[4644]: I1213 06:56:08.581872 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27cc12d70412ae379d1866ba750546b4712179b116d5277402e1005d89faf899"
Dec 13 06:56:08 crc kubenswrapper[4644]: I1213 06:56:08.581941 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw"
Dec 13 06:56:09 crc kubenswrapper[4644]: I1213 06:56:09.937937 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-m8zfg"]
Dec 13 06:56:09 crc kubenswrapper[4644]: E1213 06:56:09.938115 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef428abc-94fc-46e3-9668-d4ba73a82bf0" containerName="pull"
Dec 13 06:56:09 crc kubenswrapper[4644]: I1213 06:56:09.938126 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef428abc-94fc-46e3-9668-d4ba73a82bf0" containerName="pull"
Dec 13 06:56:09 crc kubenswrapper[4644]: E1213 06:56:09.938139 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef428abc-94fc-46e3-9668-d4ba73a82bf0" containerName="util"
Dec 13 06:56:09 crc kubenswrapper[4644]: I1213 06:56:09.938145 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef428abc-94fc-46e3-9668-d4ba73a82bf0" containerName="util"
Dec 13 06:56:09 crc kubenswrapper[4644]: E1213 06:56:09.938155 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef428abc-94fc-46e3-9668-d4ba73a82bf0" containerName="extract"
Dec 13 06:56:09 crc kubenswrapper[4644]: I1213 06:56:09.938161 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef428abc-94fc-46e3-9668-d4ba73a82bf0" containerName="extract"
Dec 13 06:56:09 crc kubenswrapper[4644]: I1213 06:56:09.938237 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef428abc-94fc-46e3-9668-d4ba73a82bf0" containerName="extract"
Dec 13 06:56:09 crc kubenswrapper[4644]: I1213 06:56:09.938562 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-6769fb99d-m8zfg"
Dec 13 06:56:09 crc kubenswrapper[4644]: I1213 06:56:09.940751 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-csnd2"
Dec 13 06:56:09 crc kubenswrapper[4644]: I1213 06:56:09.941010 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt"
Dec 13 06:56:09 crc kubenswrapper[4644]: I1213 06:56:09.941173 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt"
Dec 13 06:56:09 crc kubenswrapper[4644]: I1213 06:56:09.950953 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-m8zfg"]
Dec 13 06:56:10 crc kubenswrapper[4644]: I1213 06:56:10.002528 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t48pp\" (UniqueName: \"kubernetes.io/projected/4a77404c-19db-4d2a-bb02-c6e81d7b6f0f-kube-api-access-t48pp\") pod \"nmstate-operator-6769fb99d-m8zfg\" (UID: \"4a77404c-19db-4d2a-bb02-c6e81d7b6f0f\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-m8zfg"
Dec 13 06:56:10 crc kubenswrapper[4644]: I1213 06:56:10.103963 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t48pp\" (UniqueName: \"kubernetes.io/projected/4a77404c-19db-4d2a-bb02-c6e81d7b6f0f-kube-api-access-t48pp\") pod \"nmstate-operator-6769fb99d-m8zfg\" (UID: \"4a77404c-19db-4d2a-bb02-c6e81d7b6f0f\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-m8zfg"
Dec 13 06:56:10 crc kubenswrapper[4644]: I1213 06:56:10.120666 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t48pp\" (UniqueName: \"kubernetes.io/projected/4a77404c-19db-4d2a-bb02-c6e81d7b6f0f-kube-api-access-t48pp\") pod \"nmstate-operator-6769fb99d-m8zfg\" (UID: \"4a77404c-19db-4d2a-bb02-c6e81d7b6f0f\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-m8zfg"
Dec 13 06:56:10 crc kubenswrapper[4644]: I1213 06:56:10.251232 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-6769fb99d-m8zfg"
Dec 13 06:56:10 crc kubenswrapper[4644]: I1213 06:56:10.405269 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-m8zfg"]
Dec 13 06:56:10 crc kubenswrapper[4644]: W1213 06:56:10.409602 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4a77404c_19db_4d2a_bb02_c6e81d7b6f0f.slice/crio-2d56e7a4f84d594d9c38cf5bddf0af78a31f6a1f933acb4bf663c04bcc6c0c98 WatchSource:0}: Error finding container 2d56e7a4f84d594d9c38cf5bddf0af78a31f6a1f933acb4bf663c04bcc6c0c98: Status 404 returned error can't find the container with id 2d56e7a4f84d594d9c38cf5bddf0af78a31f6a1f933acb4bf663c04bcc6c0c98
Dec 13 06:56:10 crc kubenswrapper[4644]: I1213 06:56:10.591186 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6769fb99d-m8zfg" event={"ID":"4a77404c-19db-4d2a-bb02-c6e81d7b6f0f","Type":"ContainerStarted","Data":"2d56e7a4f84d594d9c38cf5bddf0af78a31f6a1f933acb4bf663c04bcc6c0c98"}
Dec 13 06:56:13 crc kubenswrapper[4644]: I1213 06:56:13.606997 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6769fb99d-m8zfg" event={"ID":"4a77404c-19db-4d2a-bb02-c6e81d7b6f0f","Type":"ContainerStarted","Data":"03105474b7c5d40b1790952668bb6663bb6232a8465043bc48706a792a37a66f"}
Dec 13 06:56:13 crc kubenswrapper[4644]: I1213 06:56:13.621378 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-6769fb99d-m8zfg" podStartSLOduration=2.425634384 podStartE2EDuration="4.621354434s" podCreationTimestamp="2025-12-13 06:56:09 +0000 UTC" firstStartedPulling="2025-12-13 06:56:10.412021684 +0000 UTC m=+632.626972517" lastFinishedPulling="2025-12-13 06:56:12.607741734 +0000 UTC m=+634.822692567" observedRunningTime="2025-12-13 06:56:13.619427213 +0000 UTC m=+635.834378046" watchObservedRunningTime="2025-12-13 06:56:13.621354434 +0000 UTC m=+635.836305266"
Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.412361 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-85swm"]
Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.413617 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-85swm"
Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.415916 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-kd64n"
Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.431144 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-85swm"]
Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.434283 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-ck7qt"]
Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.435219 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-ck7qt"
Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.440497 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-p84qq"]
Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.446421 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-f8fb84555-p84qq"
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-f8fb84555-p84qq" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.450007 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.471702 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/4c4e58d7-38d7-499f-91bf-eda13e345a14-nmstate-lock\") pod \"nmstate-handler-ck7qt\" (UID: \"4c4e58d7-38d7-499f-91bf-eda13e345a14\") " pod="openshift-nmstate/nmstate-handler-ck7qt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.472067 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psxl7\" (UniqueName: \"kubernetes.io/projected/4a8400f2-9d22-4c57-95fc-9f9a4a7bd0d7-kube-api-access-psxl7\") pod \"nmstate-metrics-7f7f7578db-85swm\" (UID: \"4a8400f2-9d22-4c57-95fc-9f9a4a7bd0d7\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-85swm" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.472605 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/4c4e58d7-38d7-499f-91bf-eda13e345a14-ovs-socket\") pod \"nmstate-handler-ck7qt\" (UID: \"4c4e58d7-38d7-499f-91bf-eda13e345a14\") " pod="openshift-nmstate/nmstate-handler-ck7qt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.472902 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/4c4e58d7-38d7-499f-91bf-eda13e345a14-dbus-socket\") pod \"nmstate-handler-ck7qt\" (UID: \"4c4e58d7-38d7-499f-91bf-eda13e345a14\") " pod="openshift-nmstate/nmstate-handler-ck7qt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.473115 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65ftt\" (UniqueName: \"kubernetes.io/projected/4c4e58d7-38d7-499f-91bf-eda13e345a14-kube-api-access-65ftt\") pod \"nmstate-handler-ck7qt\" (UID: \"4c4e58d7-38d7-499f-91bf-eda13e345a14\") " pod="openshift-nmstate/nmstate-handler-ck7qt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.475562 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-p84qq"] Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.534193 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-ftvkt"] Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.535014 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-ftvkt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.537278 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.537464 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.537593 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-vhdgs" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.551867 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-ftvkt"] Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.575175 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/4c4e58d7-38d7-499f-91bf-eda13e345a14-nmstate-lock\") pod \"nmstate-handler-ck7qt\" (UID: \"4c4e58d7-38d7-499f-91bf-eda13e345a14\") " pod="openshift-nmstate/nmstate-handler-ck7qt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.575510 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psxl7\" (UniqueName: \"kubernetes.io/projected/4a8400f2-9d22-4c57-95fc-9f9a4a7bd0d7-kube-api-access-psxl7\") pod \"nmstate-metrics-7f7f7578db-85swm\" (UID: \"4a8400f2-9d22-4c57-95fc-9f9a4a7bd0d7\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-85swm" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.575351 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/4c4e58d7-38d7-499f-91bf-eda13e345a14-nmstate-lock\") pod \"nmstate-handler-ck7qt\" (UID: \"4c4e58d7-38d7-499f-91bf-eda13e345a14\") " pod="openshift-nmstate/nmstate-handler-ck7qt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.575579 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/6f0a5aec-c142-49fa-bece-020038485089-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-ftvkt\" (UID: \"6f0a5aec-c142-49fa-bece-020038485089\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-ftvkt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.575602 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/91c32695-2077-4074-9e11-424ea074c4a6-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-p84qq\" (UID: \"91c32695-2077-4074-9e11-424ea074c4a6\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-p84qq" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.575627 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/4c4e58d7-38d7-499f-91bf-eda13e345a14-ovs-socket\") pod \"nmstate-handler-ck7qt\" (UID: \"4c4e58d7-38d7-499f-91bf-eda13e345a14\") " pod="openshift-nmstate/nmstate-handler-ck7qt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.575757 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/4c4e58d7-38d7-499f-91bf-eda13e345a14-ovs-socket\") pod \"nmstate-handler-ck7qt\" (UID: \"4c4e58d7-38d7-499f-91bf-eda13e345a14\") " 
pod="openshift-nmstate/nmstate-handler-ck7qt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.575835 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gvrw\" (UniqueName: \"kubernetes.io/projected/6f0a5aec-c142-49fa-bece-020038485089-kube-api-access-6gvrw\") pod \"nmstate-console-plugin-6ff7998486-ftvkt\" (UID: \"6f0a5aec-c142-49fa-bece-020038485089\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-ftvkt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.575910 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/4c4e58d7-38d7-499f-91bf-eda13e345a14-dbus-socket\") pod \"nmstate-handler-ck7qt\" (UID: \"4c4e58d7-38d7-499f-91bf-eda13e345a14\") " pod="openshift-nmstate/nmstate-handler-ck7qt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.575963 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65ftt\" (UniqueName: \"kubernetes.io/projected/4c4e58d7-38d7-499f-91bf-eda13e345a14-kube-api-access-65ftt\") pod \"nmstate-handler-ck7qt\" (UID: \"4c4e58d7-38d7-499f-91bf-eda13e345a14\") " pod="openshift-nmstate/nmstate-handler-ck7qt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.576014 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/6f0a5aec-c142-49fa-bece-020038485089-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-ftvkt\" (UID: \"6f0a5aec-c142-49fa-bece-020038485089\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-ftvkt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.576071 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q8jk\" (UniqueName: \"kubernetes.io/projected/91c32695-2077-4074-9e11-424ea074c4a6-kube-api-access-5q8jk\") pod \"nmstate-webhook-f8fb84555-p84qq\" (UID: \"91c32695-2077-4074-9e11-424ea074c4a6\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-p84qq" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.576192 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/4c4e58d7-38d7-499f-91bf-eda13e345a14-dbus-socket\") pod \"nmstate-handler-ck7qt\" (UID: \"4c4e58d7-38d7-499f-91bf-eda13e345a14\") " pod="openshift-nmstate/nmstate-handler-ck7qt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.594074 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65ftt\" (UniqueName: \"kubernetes.io/projected/4c4e58d7-38d7-499f-91bf-eda13e345a14-kube-api-access-65ftt\") pod \"nmstate-handler-ck7qt\" (UID: \"4c4e58d7-38d7-499f-91bf-eda13e345a14\") " pod="openshift-nmstate/nmstate-handler-ck7qt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.610183 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psxl7\" (UniqueName: \"kubernetes.io/projected/4a8400f2-9d22-4c57-95fc-9f9a4a7bd0d7-kube-api-access-psxl7\") pod \"nmstate-metrics-7f7f7578db-85swm\" (UID: \"4a8400f2-9d22-4c57-95fc-9f9a4a7bd0d7\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-85swm" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.677471 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: 
\"kubernetes.io/configmap/6f0a5aec-c142-49fa-bece-020038485089-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-ftvkt\" (UID: \"6f0a5aec-c142-49fa-bece-020038485089\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-ftvkt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.677754 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q8jk\" (UniqueName: \"kubernetes.io/projected/91c32695-2077-4074-9e11-424ea074c4a6-kube-api-access-5q8jk\") pod \"nmstate-webhook-f8fb84555-p84qq\" (UID: \"91c32695-2077-4074-9e11-424ea074c4a6\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-p84qq" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.678088 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/6f0a5aec-c142-49fa-bece-020038485089-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-ftvkt\" (UID: \"6f0a5aec-c142-49fa-bece-020038485089\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-ftvkt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.678128 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/91c32695-2077-4074-9e11-424ea074c4a6-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-p84qq\" (UID: \"91c32695-2077-4074-9e11-424ea074c4a6\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-p84qq" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.678203 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gvrw\" (UniqueName: \"kubernetes.io/projected/6f0a5aec-c142-49fa-bece-020038485089-kube-api-access-6gvrw\") pod \"nmstate-console-plugin-6ff7998486-ftvkt\" (UID: \"6f0a5aec-c142-49fa-bece-020038485089\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-ftvkt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.678481 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/6f0a5aec-c142-49fa-bece-020038485089-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-ftvkt\" (UID: \"6f0a5aec-c142-49fa-bece-020038485089\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-ftvkt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.681669 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/91c32695-2077-4074-9e11-424ea074c4a6-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-p84qq\" (UID: \"91c32695-2077-4074-9e11-424ea074c4a6\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-p84qq" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.681757 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/6f0a5aec-c142-49fa-bece-020038485089-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-ftvkt\" (UID: \"6f0a5aec-c142-49fa-bece-020038485089\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-ftvkt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.694142 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q8jk\" (UniqueName: \"kubernetes.io/projected/91c32695-2077-4074-9e11-424ea074c4a6-kube-api-access-5q8jk\") pod \"nmstate-webhook-f8fb84555-p84qq\" (UID: \"91c32695-2077-4074-9e11-424ea074c4a6\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-p84qq" Dec 13 06:56:14 crc 
kubenswrapper[4644]: I1213 06:56:14.694992 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gvrw\" (UniqueName: \"kubernetes.io/projected/6f0a5aec-c142-49fa-bece-020038485089-kube-api-access-6gvrw\") pod \"nmstate-console-plugin-6ff7998486-ftvkt\" (UID: \"6f0a5aec-c142-49fa-bece-020038485089\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-ftvkt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.716837 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-58fcf6886f-wkkdv"] Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.717669 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.728305 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-85swm" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.730668 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-58fcf6886f-wkkdv"] Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.753656 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-ck7qt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.761561 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-f8fb84555-p84qq" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.779411 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1a893b27-292d-4c79-94bf-c72866866e1d-oauth-serving-cert\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.779544 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1a893b27-292d-4c79-94bf-c72866866e1d-service-ca\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.779599 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a893b27-292d-4c79-94bf-c72866866e1d-trusted-ca-bundle\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.779642 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1a893b27-292d-4c79-94bf-c72866866e1d-console-oauth-config\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.779674 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1a893b27-292d-4c79-94bf-c72866866e1d-console-serving-cert\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " 
pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.779693 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4p8v\" (UniqueName: \"kubernetes.io/projected/1a893b27-292d-4c79-94bf-c72866866e1d-kube-api-access-x4p8v\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.779719 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1a893b27-292d-4c79-94bf-c72866866e1d-console-config\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: W1213 06:56:14.785533 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c4e58d7_38d7_499f_91bf_eda13e345a14.slice/crio-603b7d8cff86d60e65e7831a7bc2f0e5c93557a42cebfd757cfeaa820fe9d93c WatchSource:0}: Error finding container 603b7d8cff86d60e65e7831a7bc2f0e5c93557a42cebfd757cfeaa820fe9d93c: Status 404 returned error can't find the container with id 603b7d8cff86d60e65e7831a7bc2f0e5c93557a42cebfd757cfeaa820fe9d93c Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.849838 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-ftvkt" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.881172 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1a893b27-292d-4c79-94bf-c72866866e1d-service-ca\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.881227 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a893b27-292d-4c79-94bf-c72866866e1d-trusted-ca-bundle\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.881252 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1a893b27-292d-4c79-94bf-c72866866e1d-console-oauth-config\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.881270 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1a893b27-292d-4c79-94bf-c72866866e1d-console-serving-cert\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.881284 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4p8v\" (UniqueName: \"kubernetes.io/projected/1a893b27-292d-4c79-94bf-c72866866e1d-kube-api-access-x4p8v\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " 
pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.881303 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1a893b27-292d-4c79-94bf-c72866866e1d-console-config\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.881336 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1a893b27-292d-4c79-94bf-c72866866e1d-oauth-serving-cert\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.882514 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1a893b27-292d-4c79-94bf-c72866866e1d-oauth-serving-cert\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.883047 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1a893b27-292d-4c79-94bf-c72866866e1d-service-ca\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.883557 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1a893b27-292d-4c79-94bf-c72866866e1d-console-config\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.884667 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a893b27-292d-4c79-94bf-c72866866e1d-trusted-ca-bundle\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.886533 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1a893b27-292d-4c79-94bf-c72866866e1d-console-oauth-config\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.886756 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1a893b27-292d-4c79-94bf-c72866866e1d-console-serving-cert\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.900384 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4p8v\" (UniqueName: \"kubernetes.io/projected/1a893b27-292d-4c79-94bf-c72866866e1d-kube-api-access-x4p8v\") pod \"console-58fcf6886f-wkkdv\" (UID: \"1a893b27-292d-4c79-94bf-c72866866e1d\") " pod="openshift-console/console-58fcf6886f-wkkdv" Dec 13 
Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.922873 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-85swm"]
Dec 13 06:56:14 crc kubenswrapper[4644]: I1213 06:56:14.958481 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-p84qq"]
Dec 13 06:56:14 crc kubenswrapper[4644]: W1213 06:56:14.963669 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod91c32695_2077_4074_9e11_424ea074c4a6.slice/crio-273b0e52e655de62084eeb161dfcc5dd9bf889558e4e16f99996822af2dacf08 WatchSource:0}: Error finding container 273b0e52e655de62084eeb161dfcc5dd9bf889558e4e16f99996822af2dacf08: Status 404 returned error can't find the container with id 273b0e52e655de62084eeb161dfcc5dd9bf889558e4e16f99996822af2dacf08
Dec 13 06:56:15 crc kubenswrapper[4644]: I1213 06:56:15.017777 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-ftvkt"]
Dec 13 06:56:15 crc kubenswrapper[4644]: W1213 06:56:15.021900 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f0a5aec_c142_49fa_bece_020038485089.slice/crio-7d227e4a6d57576053cccdb2889f666075df4bdec1a03512789e1bdbfd59042f WatchSource:0}: Error finding container 7d227e4a6d57576053cccdb2889f666075df4bdec1a03512789e1bdbfd59042f: Status 404 returned error can't find the container with id 7d227e4a6d57576053cccdb2889f666075df4bdec1a03512789e1bdbfd59042f
Dec 13 06:56:15 crc kubenswrapper[4644]: I1213 06:56:15.033496 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-58fcf6886f-wkkdv"
Dec 13 06:56:15 crc kubenswrapper[4644]: I1213 06:56:15.385189 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-58fcf6886f-wkkdv"]
Dec 13 06:56:15 crc kubenswrapper[4644]: W1213 06:56:15.390972 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1a893b27_292d_4c79_94bf_c72866866e1d.slice/crio-c79a36480d5a41c009122adb264b59886cfeccf4f241b875b87e495bef5754bd WatchSource:0}: Error finding container c79a36480d5a41c009122adb264b59886cfeccf4f241b875b87e495bef5754bd: Status 404 returned error can't find the container with id c79a36480d5a41c009122adb264b59886cfeccf4f241b875b87e495bef5754bd
Dec 13 06:56:15 crc kubenswrapper[4644]: I1213 06:56:15.620180 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-ck7qt" event={"ID":"4c4e58d7-38d7-499f-91bf-eda13e345a14","Type":"ContainerStarted","Data":"603b7d8cff86d60e65e7831a7bc2f0e5c93557a42cebfd757cfeaa820fe9d93c"}
Dec 13 06:56:15 crc kubenswrapper[4644]: I1213 06:56:15.622141 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-85swm" event={"ID":"4a8400f2-9d22-4c57-95fc-9f9a4a7bd0d7","Type":"ContainerStarted","Data":"9309146c953dee4edb5b17c222c3a01ad97b13604f01070151c2ff13f2e828f9"}
Dec 13 06:56:15 crc kubenswrapper[4644]: I1213 06:56:15.623162 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-f8fb84555-p84qq" event={"ID":"91c32695-2077-4074-9e11-424ea074c4a6","Type":"ContainerStarted","Data":"273b0e52e655de62084eeb161dfcc5dd9bf889558e4e16f99996822af2dacf08"}
Dec 13 06:56:15 crc kubenswrapper[4644]: I1213 06:56:15.624839 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-58fcf6886f-wkkdv" event={"ID":"1a893b27-292d-4c79-94bf-c72866866e1d","Type":"ContainerStarted","Data":"5c9de187f1fbc5ce97ca5bb8b1491b8d717998c6cde57ba83986bb32ae482bd4"}
Dec 13 06:56:15 crc kubenswrapper[4644]: I1213 06:56:15.624876 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-58fcf6886f-wkkdv" event={"ID":"1a893b27-292d-4c79-94bf-c72866866e1d","Type":"ContainerStarted","Data":"c79a36480d5a41c009122adb264b59886cfeccf4f241b875b87e495bef5754bd"}
Dec 13 06:56:15 crc kubenswrapper[4644]: I1213 06:56:15.625936 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-ftvkt" event={"ID":"6f0a5aec-c142-49fa-bece-020038485089","Type":"ContainerStarted","Data":"7d227e4a6d57576053cccdb2889f666075df4bdec1a03512789e1bdbfd59042f"}
Dec 13 06:56:15 crc kubenswrapper[4644]: I1213 06:56:15.643865 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-58fcf6886f-wkkdv" podStartSLOduration=1.6438425909999999 podStartE2EDuration="1.643842591s" podCreationTimestamp="2025-12-13 06:56:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:56:15.638953365 +0000 UTC m=+637.853904199" watchObservedRunningTime="2025-12-13 06:56:15.643842591 +0000 UTC m=+637.858793424"
Dec 13 06:56:18 crc kubenswrapper[4644]: I1213 06:56:18.647918 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-ftvkt" event={"ID":"6f0a5aec-c142-49fa-bece-020038485089","Type":"ContainerStarted","Data":"de11991f574b15109d1e97502dfea83f3ae338006d8f9ca8cd3681c04681a449"}
Dec 13 06:56:18 crc kubenswrapper[4644]: I1213 06:56:18.649740 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-ck7qt" event={"ID":"4c4e58d7-38d7-499f-91bf-eda13e345a14","Type":"ContainerStarted","Data":"eb33b7f7716a235f4c4a9b64c26386edb0e319423f73fa8ed23a6794cc50c942"}
Dec 13 06:56:18 crc kubenswrapper[4644]: I1213 06:56:18.649873 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-ck7qt"
Dec 13 06:56:18 crc kubenswrapper[4644]: I1213 06:56:18.651317 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-85swm" event={"ID":"4a8400f2-9d22-4c57-95fc-9f9a4a7bd0d7","Type":"ContainerStarted","Data":"1587b6bef39f1c531c26977fd38618ead21e0fc9ce35c9ed7385e9603879a59a"}
Dec 13 06:56:18 crc kubenswrapper[4644]: I1213 06:56:18.652592 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-f8fb84555-p84qq" event={"ID":"91c32695-2077-4074-9e11-424ea074c4a6","Type":"ContainerStarted","Data":"efd08c7a781ddb9bd97024d7250b8b35b9e85e0ec99f9fb5a022b28650be8e0a"}
Dec 13 06:56:18 crc kubenswrapper[4644]: I1213 06:56:18.652771 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-f8fb84555-p84qq"
Dec 13 06:56:18 crc kubenswrapper[4644]: I1213 06:56:18.669609 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-ftvkt" podStartSLOduration=1.858135838 podStartE2EDuration="4.669590078s" podCreationTimestamp="2025-12-13 06:56:14 +0000 UTC" firstStartedPulling="2025-12-13 06:56:15.024066168 +0000 UTC m=+637.239017001" lastFinishedPulling="2025-12-13 06:56:17.835520408 +0000 UTC m=+640.050471241" observedRunningTime="2025-12-13 06:56:18.664844902 +0000 UTC m=+640.879795735" watchObservedRunningTime="2025-12-13 06:56:18.669590078 +0000 UTC m=+640.884540911"
Dec 13 06:56:18 crc kubenswrapper[4644]: I1213 06:56:18.682125 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-ck7qt" podStartSLOduration=1.6360382900000001 podStartE2EDuration="4.682106093s" podCreationTimestamp="2025-12-13 06:56:14 +0000 UTC" firstStartedPulling="2025-12-13 06:56:14.793943392 +0000 UTC m=+637.008894225" lastFinishedPulling="2025-12-13 06:56:17.840011194 +0000 UTC m=+640.054962028" observedRunningTime="2025-12-13 06:56:18.677075641 +0000 UTC m=+640.892026474" watchObservedRunningTime="2025-12-13 06:56:18.682106093 +0000 UTC m=+640.897056926"
Dec 13 06:56:20 crc kubenswrapper[4644]: I1213 06:56:20.665072 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-85swm" event={"ID":"4a8400f2-9d22-4c57-95fc-9f9a4a7bd0d7","Type":"ContainerStarted","Data":"602caaef54b66bb343268988d4e21fbe6244f70e74395bb1f4a3576d6ddb9c7e"}
Dec 13 06:56:20 crc kubenswrapper[4644]: I1213 06:56:20.681764 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-85swm" podStartSLOduration=1.8469730549999999 podStartE2EDuration="6.681742056s" podCreationTimestamp="2025-12-13 06:56:14 +0000 UTC" firstStartedPulling="2025-12-13 06:56:14.929152639 +0000 UTC m=+637.144103471" lastFinishedPulling="2025-12-13 06:56:19.763921638 +0000 UTC m=+641.978872472" observedRunningTime="2025-12-13 06:56:20.678088993 +0000 UTC m=+642.893039826" watchObservedRunningTime="2025-12-13 06:56:20.681742056 +0000 UTC m=+642.896692889"
Dec 13 06:56:20 crc kubenswrapper[4644]: I1213 06:56:20.681870 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-f8fb84555-p84qq" podStartSLOduration=3.8046575430000003 podStartE2EDuration="6.681864647s" podCreationTimestamp="2025-12-13 06:56:14 +0000 UTC" firstStartedPulling="2025-12-13 06:56:14.966995977 +0000 UTC m=+637.181946809" lastFinishedPulling="2025-12-13 06:56:17.84420308 +0000 UTC m=+640.059153913" observedRunningTime="2025-12-13 06:56:18.704581589 +0000 UTC m=+640.919532422" watchObservedRunningTime="2025-12-13 06:56:20.681864647 +0000 UTC m=+642.896815480"
Dec 13 06:56:24 crc kubenswrapper[4644]: I1213 06:56:24.774678 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-ck7qt"
Dec 13 06:56:25 crc kubenswrapper[4644]: I1213 06:56:25.034923 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-58fcf6886f-wkkdv"
Dec 13 06:56:25 crc kubenswrapper[4644]: I1213 06:56:25.034989 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-58fcf6886f-wkkdv"
Dec 13 06:56:25 crc kubenswrapper[4644]: I1213 06:56:25.039995 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-58fcf6886f-wkkdv"
Dec 13 06:56:25 crc kubenswrapper[4644]: I1213 06:56:25.699862 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-58fcf6886f-wkkdv"
Dec 13 06:56:25 crc kubenswrapper[4644]: I1213 06:56:25.741429 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-wfsz4"]
Dec 13 06:56:34 crc kubenswrapper[4644]: I1213 06:56:34.767552 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-f8fb84555-p84qq"
Dec 13 06:56:44 crc kubenswrapper[4644]: I1213 06:56:44.828754 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht"]
Dec 13 06:56:44 crc kubenswrapper[4644]: I1213 06:56:44.830110 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht"
Dec 13 06:56:44 crc kubenswrapper[4644]: I1213 06:56:44.831832 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 13 06:56:44 crc kubenswrapper[4644]: I1213 06:56:44.841482 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht"]
Dec 13 06:56:44 crc kubenswrapper[4644]: I1213 06:56:44.982394 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwnws\" (UniqueName: \"kubernetes.io/projected/b633d946-5f6e-4256-92ce-166f05f71f51-kube-api-access-zwnws\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht\" (UID: \"b633d946-5f6e-4256-92ce-166f05f71f51\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht"
Dec 13 06:56:44 crc kubenswrapper[4644]: I1213 06:56:44.982628 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b633d946-5f6e-4256-92ce-166f05f71f51-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht\" (UID: \"b633d946-5f6e-4256-92ce-166f05f71f51\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht"
Dec 13 06:56:44 crc kubenswrapper[4644]: I1213 06:56:44.982658 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b633d946-5f6e-4256-92ce-166f05f71f51-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht\" (UID: \"b633d946-5f6e-4256-92ce-166f05f71f51\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht"
Dec 13 06:56:45 crc kubenswrapper[4644]: I1213 06:56:45.083301 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b633d946-5f6e-4256-92ce-166f05f71f51-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht\" (UID: \"b633d946-5f6e-4256-92ce-166f05f71f51\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht"
Dec 13 06:56:45 crc kubenswrapper[4644]: I1213 06:56:45.083345 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b633d946-5f6e-4256-92ce-166f05f71f51-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht\" (UID: \"b633d946-5f6e-4256-92ce-166f05f71f51\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht"
Dec 13 06:56:45 crc kubenswrapper[4644]: I1213 06:56:45.083372 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwnws\" (UniqueName: \"kubernetes.io/projected/b633d946-5f6e-4256-92ce-166f05f71f51-kube-api-access-zwnws\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht\" (UID: \"b633d946-5f6e-4256-92ce-166f05f71f51\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht"
Dec 13 06:56:45 crc kubenswrapper[4644]: I1213 06:56:45.083966 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b633d946-5f6e-4256-92ce-166f05f71f51-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht\" (UID: \"b633d946-5f6e-4256-92ce-166f05f71f51\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht"
Dec 13 06:56:45 crc kubenswrapper[4644]: I1213 06:56:45.084046 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b633d946-5f6e-4256-92ce-166f05f71f51-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht\" (UID: \"b633d946-5f6e-4256-92ce-166f05f71f51\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht"
Dec 13 06:56:45 crc kubenswrapper[4644]: I1213 06:56:45.098960 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwnws\" (UniqueName: \"kubernetes.io/projected/b633d946-5f6e-4256-92ce-166f05f71f51-kube-api-access-zwnws\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht\" (UID: \"b633d946-5f6e-4256-92ce-166f05f71f51\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht"
Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht" Dec 13 06:56:45 crc kubenswrapper[4644]: I1213 06:56:45.482461 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht"] Dec 13 06:56:45 crc kubenswrapper[4644]: I1213 06:56:45.798374 4644 generic.go:334] "Generic (PLEG): container finished" podID="b633d946-5f6e-4256-92ce-166f05f71f51" containerID="d2d045b4a4ab41b95da23c021662bf6207eb944ea64e2deaced6a9f36a98ef20" exitCode=0 Dec 13 06:56:45 crc kubenswrapper[4644]: I1213 06:56:45.798482 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht" event={"ID":"b633d946-5f6e-4256-92ce-166f05f71f51","Type":"ContainerDied","Data":"d2d045b4a4ab41b95da23c021662bf6207eb944ea64e2deaced6a9f36a98ef20"} Dec 13 06:56:45 crc kubenswrapper[4644]: I1213 06:56:45.798628 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht" event={"ID":"b633d946-5f6e-4256-92ce-166f05f71f51","Type":"ContainerStarted","Data":"17e5419225e77e2d631fe30d7db9d7864d04f320abb23fa396abd7445a3a5e90"} Dec 13 06:56:47 crc kubenswrapper[4644]: I1213 06:56:47.809794 4644 generic.go:334] "Generic (PLEG): container finished" podID="b633d946-5f6e-4256-92ce-166f05f71f51" containerID="02321d98b77f4a26082fadf8e3fe6dac8fe94e575cadf9b83ed0c85f75323557" exitCode=0 Dec 13 06:56:47 crc kubenswrapper[4644]: I1213 06:56:47.809850 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht" event={"ID":"b633d946-5f6e-4256-92ce-166f05f71f51","Type":"ContainerDied","Data":"02321d98b77f4a26082fadf8e3fe6dac8fe94e575cadf9b83ed0c85f75323557"} Dec 13 06:56:48 crc kubenswrapper[4644]: I1213 06:56:48.816383 4644 generic.go:334] "Generic (PLEG): container finished" podID="b633d946-5f6e-4256-92ce-166f05f71f51" containerID="199e7f130695ba7b367fc248886642610d8fdf70d7d8a18434fd9c1dc7f3ccd1" exitCode=0 Dec 13 06:56:48 crc kubenswrapper[4644]: I1213 06:56:48.816459 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht" event={"ID":"b633d946-5f6e-4256-92ce-166f05f71f51","Type":"ContainerDied","Data":"199e7f130695ba7b367fc248886642610d8fdf70d7d8a18434fd9c1dc7f3ccd1"} Dec 13 06:56:50 crc kubenswrapper[4644]: I1213 06:56:50.017964 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht" Dec 13 06:56:50 crc kubenswrapper[4644]: I1213 06:56:50.153209 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwnws\" (UniqueName: \"kubernetes.io/projected/b633d946-5f6e-4256-92ce-166f05f71f51-kube-api-access-zwnws\") pod \"b633d946-5f6e-4256-92ce-166f05f71f51\" (UID: \"b633d946-5f6e-4256-92ce-166f05f71f51\") " Dec 13 06:56:50 crc kubenswrapper[4644]: I1213 06:56:50.153264 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b633d946-5f6e-4256-92ce-166f05f71f51-util\") pod \"b633d946-5f6e-4256-92ce-166f05f71f51\" (UID: \"b633d946-5f6e-4256-92ce-166f05f71f51\") " Dec 13 06:56:50 crc kubenswrapper[4644]: I1213 06:56:50.153312 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b633d946-5f6e-4256-92ce-166f05f71f51-bundle\") pod \"b633d946-5f6e-4256-92ce-166f05f71f51\" (UID: \"b633d946-5f6e-4256-92ce-166f05f71f51\") " Dec 13 06:56:50 crc kubenswrapper[4644]: I1213 06:56:50.154310 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b633d946-5f6e-4256-92ce-166f05f71f51-bundle" (OuterVolumeSpecName: "bundle") pod "b633d946-5f6e-4256-92ce-166f05f71f51" (UID: "b633d946-5f6e-4256-92ce-166f05f71f51"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:56:50 crc kubenswrapper[4644]: I1213 06:56:50.159232 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b633d946-5f6e-4256-92ce-166f05f71f51-kube-api-access-zwnws" (OuterVolumeSpecName: "kube-api-access-zwnws") pod "b633d946-5f6e-4256-92ce-166f05f71f51" (UID: "b633d946-5f6e-4256-92ce-166f05f71f51"). InnerVolumeSpecName "kube-api-access-zwnws". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:56:50 crc kubenswrapper[4644]: I1213 06:56:50.163300 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b633d946-5f6e-4256-92ce-166f05f71f51-util" (OuterVolumeSpecName: "util") pod "b633d946-5f6e-4256-92ce-166f05f71f51" (UID: "b633d946-5f6e-4256-92ce-166f05f71f51"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:56:50 crc kubenswrapper[4644]: I1213 06:56:50.254944 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwnws\" (UniqueName: \"kubernetes.io/projected/b633d946-5f6e-4256-92ce-166f05f71f51-kube-api-access-zwnws\") on node \"crc\" DevicePath \"\"" Dec 13 06:56:50 crc kubenswrapper[4644]: I1213 06:56:50.254977 4644 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b633d946-5f6e-4256-92ce-166f05f71f51-util\") on node \"crc\" DevicePath \"\"" Dec 13 06:56:50 crc kubenswrapper[4644]: I1213 06:56:50.254988 4644 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b633d946-5f6e-4256-92ce-166f05f71f51-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 06:56:50 crc kubenswrapper[4644]: I1213 06:56:50.769350 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-wfsz4" podUID="ac23aa18-ed6d-4ea9-b720-aa7ccb164459" containerName="console" containerID="cri-o://e7f2007f498537b84451941a1b217de201026e52e67797c93083af130ea34ead" gracePeriod=15 Dec 13 06:56:50 crc kubenswrapper[4644]: I1213 06:56:50.827646 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht" event={"ID":"b633d946-5f6e-4256-92ce-166f05f71f51","Type":"ContainerDied","Data":"17e5419225e77e2d631fe30d7db9d7864d04f320abb23fa396abd7445a3a5e90"} Dec 13 06:56:50 crc kubenswrapper[4644]: I1213 06:56:50.827670 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht" Dec 13 06:56:50 crc kubenswrapper[4644]: I1213 06:56:50.827684 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="17e5419225e77e2d631fe30d7db9d7864d04f320abb23fa396abd7445a3a5e90" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.067061 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-wfsz4_ac23aa18-ed6d-4ea9-b720-aa7ccb164459/console/0.log" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.067128 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.165296 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-serving-cert\") pod \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.165642 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-config\") pod \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.165691 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-service-ca\") pod \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.165741 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-trusted-ca-bundle\") pod \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.165770 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-oauth-config\") pod \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.165790 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7v52\" (UniqueName: \"kubernetes.io/projected/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-kube-api-access-k7v52\") pod \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.165816 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-oauth-serving-cert\") pod \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\" (UID: \"ac23aa18-ed6d-4ea9-b720-aa7ccb164459\") " Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.166718 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-config" (OuterVolumeSpecName: "console-config") pod "ac23aa18-ed6d-4ea9-b720-aa7ccb164459" (UID: "ac23aa18-ed6d-4ea9-b720-aa7ccb164459"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.167083 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-service-ca" (OuterVolumeSpecName: "service-ca") pod "ac23aa18-ed6d-4ea9-b720-aa7ccb164459" (UID: "ac23aa18-ed6d-4ea9-b720-aa7ccb164459"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.167121 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "ac23aa18-ed6d-4ea9-b720-aa7ccb164459" (UID: "ac23aa18-ed6d-4ea9-b720-aa7ccb164459"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.167561 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "ac23aa18-ed6d-4ea9-b720-aa7ccb164459" (UID: "ac23aa18-ed6d-4ea9-b720-aa7ccb164459"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.171650 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "ac23aa18-ed6d-4ea9-b720-aa7ccb164459" (UID: "ac23aa18-ed6d-4ea9-b720-aa7ccb164459"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.171650 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-kube-api-access-k7v52" (OuterVolumeSpecName: "kube-api-access-k7v52") pod "ac23aa18-ed6d-4ea9-b720-aa7ccb164459" (UID: "ac23aa18-ed6d-4ea9-b720-aa7ccb164459"). InnerVolumeSpecName "kube-api-access-k7v52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.172279 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "ac23aa18-ed6d-4ea9-b720-aa7ccb164459" (UID: "ac23aa18-ed6d-4ea9-b720-aa7ccb164459"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.267578 4644 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.267629 4644 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.267641 4644 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-service-ca\") on node \"crc\" DevicePath \"\"" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.267650 4644 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.267663 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7v52\" (UniqueName: \"kubernetes.io/projected/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-kube-api-access-k7v52\") on node \"crc\" DevicePath \"\"" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.267674 4644 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.267683 4644 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ac23aa18-ed6d-4ea9-b720-aa7ccb164459-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.833309 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-wfsz4_ac23aa18-ed6d-4ea9-b720-aa7ccb164459/console/0.log" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.833354 4644 generic.go:334] "Generic (PLEG): container finished" podID="ac23aa18-ed6d-4ea9-b720-aa7ccb164459" containerID="e7f2007f498537b84451941a1b217de201026e52e67797c93083af130ea34ead" exitCode=2 Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.833381 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wfsz4" event={"ID":"ac23aa18-ed6d-4ea9-b720-aa7ccb164459","Type":"ContainerDied","Data":"e7f2007f498537b84451941a1b217de201026e52e67797c93083af130ea34ead"} Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.833421 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wfsz4" event={"ID":"ac23aa18-ed6d-4ea9-b720-aa7ccb164459","Type":"ContainerDied","Data":"7c28f4d483df4b9e209f42b0791c3dc4b4a886df284819842ead372469a2387f"} Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.833420 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-wfsz4" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.833434 4644 scope.go:117] "RemoveContainer" containerID="e7f2007f498537b84451941a1b217de201026e52e67797c93083af130ea34ead" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.847157 4644 scope.go:117] "RemoveContainer" containerID="e7f2007f498537b84451941a1b217de201026e52e67797c93083af130ea34ead" Dec 13 06:56:51 crc kubenswrapper[4644]: E1213 06:56:51.847393 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7f2007f498537b84451941a1b217de201026e52e67797c93083af130ea34ead\": container with ID starting with e7f2007f498537b84451941a1b217de201026e52e67797c93083af130ea34ead not found: ID does not exist" containerID="e7f2007f498537b84451941a1b217de201026e52e67797c93083af130ea34ead" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.847426 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7f2007f498537b84451941a1b217de201026e52e67797c93083af130ea34ead"} err="failed to get container status \"e7f2007f498537b84451941a1b217de201026e52e67797c93083af130ea34ead\": rpc error: code = NotFound desc = could not find container \"e7f2007f498537b84451941a1b217de201026e52e67797c93083af130ea34ead\": container with ID starting with e7f2007f498537b84451941a1b217de201026e52e67797c93083af130ea34ead not found: ID does not exist" Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.852661 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-wfsz4"] Dec 13 06:56:51 crc kubenswrapper[4644]: I1213 06:56:51.855581 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-wfsz4"] Dec 13 06:56:52 crc kubenswrapper[4644]: I1213 06:56:52.394790 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac23aa18-ed6d-4ea9-b720-aa7ccb164459" path="/var/lib/kubelet/pods/ac23aa18-ed6d-4ea9-b720-aa7ccb164459/volumes" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.322651 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q"] Dec 13 06:56:59 crc kubenswrapper[4644]: E1213 06:56:59.323131 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b633d946-5f6e-4256-92ce-166f05f71f51" containerName="extract" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.323144 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="b633d946-5f6e-4256-92ce-166f05f71f51" containerName="extract" Dec 13 06:56:59 crc kubenswrapper[4644]: E1213 06:56:59.323157 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b633d946-5f6e-4256-92ce-166f05f71f51" containerName="util" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.323163 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="b633d946-5f6e-4256-92ce-166f05f71f51" containerName="util" Dec 13 06:56:59 crc kubenswrapper[4644]: E1213 06:56:59.323175 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b633d946-5f6e-4256-92ce-166f05f71f51" containerName="pull" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.323181 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="b633d946-5f6e-4256-92ce-166f05f71f51" containerName="pull" Dec 13 06:56:59 crc kubenswrapper[4644]: E1213 06:56:59.323195 4644 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="ac23aa18-ed6d-4ea9-b720-aa7ccb164459" containerName="console" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.323200 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac23aa18-ed6d-4ea9-b720-aa7ccb164459" containerName="console" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.323277 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac23aa18-ed6d-4ea9-b720-aa7ccb164459" containerName="console" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.323288 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="b633d946-5f6e-4256-92ce-166f05f71f51" containerName="extract" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.323648 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.325426 4644 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.325836 4644 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.325923 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.330326 4644 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-kn8n5" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.330595 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.341106 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q"] Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.479414 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9ad3f0c3-9ec1-43d4-8d56-16982c14ba46-apiservice-cert\") pod \"metallb-operator-controller-manager-855484b47c-v5c5q\" (UID: \"9ad3f0c3-9ec1-43d4-8d56-16982c14ba46\") " pod="metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.479643 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2hgd\" (UniqueName: \"kubernetes.io/projected/9ad3f0c3-9ec1-43d4-8d56-16982c14ba46-kube-api-access-w2hgd\") pod \"metallb-operator-controller-manager-855484b47c-v5c5q\" (UID: \"9ad3f0c3-9ec1-43d4-8d56-16982c14ba46\") " pod="metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.479704 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9ad3f0c3-9ec1-43d4-8d56-16982c14ba46-webhook-cert\") pod \"metallb-operator-controller-manager-855484b47c-v5c5q\" (UID: \"9ad3f0c3-9ec1-43d4-8d56-16982c14ba46\") " pod="metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.580821 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/9ad3f0c3-9ec1-43d4-8d56-16982c14ba46-apiservice-cert\") pod \"metallb-operator-controller-manager-855484b47c-v5c5q\" (UID: \"9ad3f0c3-9ec1-43d4-8d56-16982c14ba46\") " pod="metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.581382 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2hgd\" (UniqueName: \"kubernetes.io/projected/9ad3f0c3-9ec1-43d4-8d56-16982c14ba46-kube-api-access-w2hgd\") pod \"metallb-operator-controller-manager-855484b47c-v5c5q\" (UID: \"9ad3f0c3-9ec1-43d4-8d56-16982c14ba46\") " pod="metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.581513 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9ad3f0c3-9ec1-43d4-8d56-16982c14ba46-webhook-cert\") pod \"metallb-operator-controller-manager-855484b47c-v5c5q\" (UID: \"9ad3f0c3-9ec1-43d4-8d56-16982c14ba46\") " pod="metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.598230 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9ad3f0c3-9ec1-43d4-8d56-16982c14ba46-apiservice-cert\") pod \"metallb-operator-controller-manager-855484b47c-v5c5q\" (UID: \"9ad3f0c3-9ec1-43d4-8d56-16982c14ba46\") " pod="metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.599003 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9ad3f0c3-9ec1-43d4-8d56-16982c14ba46-webhook-cert\") pod \"metallb-operator-controller-manager-855484b47c-v5c5q\" (UID: \"9ad3f0c3-9ec1-43d4-8d56-16982c14ba46\") " pod="metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.599260 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2hgd\" (UniqueName: \"kubernetes.io/projected/9ad3f0c3-9ec1-43d4-8d56-16982c14ba46-kube-api-access-w2hgd\") pod \"metallb-operator-controller-manager-855484b47c-v5c5q\" (UID: \"9ad3f0c3-9ec1-43d4-8d56-16982c14ba46\") " pod="metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.639898 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.749917 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v"] Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.751152 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.756661 4644 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.756868 4644 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.757091 4644 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-4wftc" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.775670 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v"] Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.863905 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q"] Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.888372 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0e78bd18-1038-45a4-989b-cd5fab723a89-apiservice-cert\") pod \"metallb-operator-webhook-server-7dbf74bd54-pqt2v\" (UID: \"0e78bd18-1038-45a4-989b-cd5fab723a89\") " pod="metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.888433 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbsbt\" (UniqueName: \"kubernetes.io/projected/0e78bd18-1038-45a4-989b-cd5fab723a89-kube-api-access-wbsbt\") pod \"metallb-operator-webhook-server-7dbf74bd54-pqt2v\" (UID: \"0e78bd18-1038-45a4-989b-cd5fab723a89\") " pod="metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.888511 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0e78bd18-1038-45a4-989b-cd5fab723a89-webhook-cert\") pod \"metallb-operator-webhook-server-7dbf74bd54-pqt2v\" (UID: \"0e78bd18-1038-45a4-989b-cd5fab723a89\") " pod="metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.990404 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0e78bd18-1038-45a4-989b-cd5fab723a89-webhook-cert\") pod \"metallb-operator-webhook-server-7dbf74bd54-pqt2v\" (UID: \"0e78bd18-1038-45a4-989b-cd5fab723a89\") " pod="metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.990661 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0e78bd18-1038-45a4-989b-cd5fab723a89-apiservice-cert\") pod \"metallb-operator-webhook-server-7dbf74bd54-pqt2v\" (UID: \"0e78bd18-1038-45a4-989b-cd5fab723a89\") " pod="metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.990697 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbsbt\" (UniqueName: \"kubernetes.io/projected/0e78bd18-1038-45a4-989b-cd5fab723a89-kube-api-access-wbsbt\") pod 
\"metallb-operator-webhook-server-7dbf74bd54-pqt2v\" (UID: \"0e78bd18-1038-45a4-989b-cd5fab723a89\") " pod="metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.996508 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0e78bd18-1038-45a4-989b-cd5fab723a89-webhook-cert\") pod \"metallb-operator-webhook-server-7dbf74bd54-pqt2v\" (UID: \"0e78bd18-1038-45a4-989b-cd5fab723a89\") " pod="metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v" Dec 13 06:56:59 crc kubenswrapper[4644]: I1213 06:56:59.996551 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0e78bd18-1038-45a4-989b-cd5fab723a89-apiservice-cert\") pod \"metallb-operator-webhook-server-7dbf74bd54-pqt2v\" (UID: \"0e78bd18-1038-45a4-989b-cd5fab723a89\") " pod="metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v" Dec 13 06:57:00 crc kubenswrapper[4644]: I1213 06:57:00.007313 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbsbt\" (UniqueName: \"kubernetes.io/projected/0e78bd18-1038-45a4-989b-cd5fab723a89-kube-api-access-wbsbt\") pod \"metallb-operator-webhook-server-7dbf74bd54-pqt2v\" (UID: \"0e78bd18-1038-45a4-989b-cd5fab723a89\") " pod="metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v" Dec 13 06:57:00 crc kubenswrapper[4644]: I1213 06:57:00.090992 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v" Dec 13 06:57:00 crc kubenswrapper[4644]: I1213 06:57:00.464013 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v"] Dec 13 06:57:00 crc kubenswrapper[4644]: W1213 06:57:00.468432 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e78bd18_1038_45a4_989b_cd5fab723a89.slice/crio-ce37b2e3b72a14df4d2a76e8291edfef4b3957300f4d1b27a5a375602de9e775 WatchSource:0}: Error finding container ce37b2e3b72a14df4d2a76e8291edfef4b3957300f4d1b27a5a375602de9e775: Status 404 returned error can't find the container with id ce37b2e3b72a14df4d2a76e8291edfef4b3957300f4d1b27a5a375602de9e775 Dec 13 06:57:00 crc kubenswrapper[4644]: I1213 06:57:00.878079 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q" event={"ID":"9ad3f0c3-9ec1-43d4-8d56-16982c14ba46","Type":"ContainerStarted","Data":"3857479a7ca42ce5266752f5c8263354524326ac25bec3c0a675e6c9fb8ab2e0"} Dec 13 06:57:00 crc kubenswrapper[4644]: I1213 06:57:00.879273 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v" event={"ID":"0e78bd18-1038-45a4-989b-cd5fab723a89","Type":"ContainerStarted","Data":"ce37b2e3b72a14df4d2a76e8291edfef4b3957300f4d1b27a5a375602de9e775"} Dec 13 06:57:04 crc kubenswrapper[4644]: I1213 06:57:04.902701 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v" event={"ID":"0e78bd18-1038-45a4-989b-cd5fab723a89","Type":"ContainerStarted","Data":"7a315b4421e98a846580c341bf4de029d11047677c7549e057ff974908a9def9"} Dec 13 06:57:04 crc kubenswrapper[4644]: I1213 06:57:04.903294 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v" Dec 13 06:57:04 crc kubenswrapper[4644]: I1213 06:57:04.904379 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q" event={"ID":"9ad3f0c3-9ec1-43d4-8d56-16982c14ba46","Type":"ContainerStarted","Data":"7dbdfa88ab2d92148992b1ea798deee64d5ec584e384a1f77f6dcc54a0cdfd33"} Dec 13 06:57:04 crc kubenswrapper[4644]: I1213 06:57:04.904630 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q" Dec 13 06:57:04 crc kubenswrapper[4644]: I1213 06:57:04.927262 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v" podStartSLOduration=1.907681437 podStartE2EDuration="5.927243482s" podCreationTimestamp="2025-12-13 06:56:59 +0000 UTC" firstStartedPulling="2025-12-13 06:57:00.471756263 +0000 UTC m=+682.686707097" lastFinishedPulling="2025-12-13 06:57:04.491318309 +0000 UTC m=+686.706269142" observedRunningTime="2025-12-13 06:57:04.923544289 +0000 UTC m=+687.138495122" watchObservedRunningTime="2025-12-13 06:57:04.927243482 +0000 UTC m=+687.142194315" Dec 13 06:57:04 crc kubenswrapper[4644]: I1213 06:57:04.941864 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q" podStartSLOduration=1.324448442 podStartE2EDuration="5.941839905s" podCreationTimestamp="2025-12-13 06:56:59 +0000 UTC" firstStartedPulling="2025-12-13 06:56:59.873293635 +0000 UTC m=+682.088244468" lastFinishedPulling="2025-12-13 06:57:04.490685099 +0000 UTC m=+686.705635931" observedRunningTime="2025-12-13 06:57:04.94066128 +0000 UTC m=+687.155612113" watchObservedRunningTime="2025-12-13 06:57:04.941839905 +0000 UTC m=+687.156790738" Dec 13 06:57:09 crc kubenswrapper[4644]: I1213 06:57:09.753793 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 06:57:09 crc kubenswrapper[4644]: I1213 06:57:09.754865 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 06:57:20 crc kubenswrapper[4644]: I1213 06:57:20.095069 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-7dbf74bd54-pqt2v" Dec 13 06:57:39 crc kubenswrapper[4644]: I1213 06:57:39.642542 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-855484b47c-v5c5q" Dec 13 06:57:39 crc kubenswrapper[4644]: I1213 06:57:39.753465 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 06:57:39 crc kubenswrapper[4644]: I1213 06:57:39.753542 4644 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.186077 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-n2vrv"] Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.199626 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-ld5c2"] Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.200037 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.201296 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ld5c2" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.203509 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.203587 4644 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.203722 4644 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.204195 4644 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-zt49w" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.223743 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-ld5c2"] Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.276618 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-lzmmt"] Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.277995 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-lzmmt" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.282821 4644 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.283146 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.283481 4644 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-4kj8x" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.283715 4644 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.324012 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5bddd4b946-55rq7"] Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.324912 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5bddd4b946-55rq7" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.326325 4644 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.337517 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-metrics-certs\") pod \"speaker-lzmmt\" (UID: \"72b3e06f-eeaa-4db7-a2dd-ec97404219bb\") " pod="metallb-system/speaker-lzmmt" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.337563 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b2f2ae81-858e-49c7-9a13-00a35850e02d-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-ld5c2\" (UID: \"b2f2ae81-858e-49c7-9a13-00a35850e02d\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ld5c2" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.337594 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxqlw\" (UniqueName: \"kubernetes.io/projected/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-kube-api-access-rxqlw\") pod \"speaker-lzmmt\" (UID: \"72b3e06f-eeaa-4db7-a2dd-ec97404219bb\") " pod="metallb-system/speaker-lzmmt" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.337682 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-frr-sockets\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.337699 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-frr-conf\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.337760 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rltxr\" (UniqueName: \"kubernetes.io/projected/b2f2ae81-858e-49c7-9a13-00a35850e02d-kube-api-access-rltxr\") pod \"frr-k8s-webhook-server-7784b6fcf-ld5c2\" (UID: \"b2f2ae81-858e-49c7-9a13-00a35850e02d\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ld5c2" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.337776 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-reloader\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.337807 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-frr-startup\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.337821 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-dsrst\" (UniqueName: \"kubernetes.io/projected/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-kube-api-access-dsrst\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.337874 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-metrics\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.337902 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-memberlist\") pod \"speaker-lzmmt\" (UID: \"72b3e06f-eeaa-4db7-a2dd-ec97404219bb\") " pod="metallb-system/speaker-lzmmt" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.337915 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-metrics-certs\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.337939 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-metallb-excludel2\") pod \"speaker-lzmmt\" (UID: \"72b3e06f-eeaa-4db7-a2dd-ec97404219bb\") " pod="metallb-system/speaker-lzmmt" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.348992 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-55rq7"] Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.439546 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-metrics\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.439963 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-memberlist\") pod \"speaker-lzmmt\" (UID: \"72b3e06f-eeaa-4db7-a2dd-ec97404219bb\") " pod="metallb-system/speaker-lzmmt" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.440125 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-metrics-certs\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.440233 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-metallb-excludel2\") pod \"speaker-lzmmt\" (UID: \"72b3e06f-eeaa-4db7-a2dd-ec97404219bb\") " pod="metallb-system/speaker-lzmmt" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.440377 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-metrics-certs\") pod \"speaker-lzmmt\" (UID: \"72b3e06f-eeaa-4db7-a2dd-ec97404219bb\") " pod="metallb-system/speaker-lzmmt" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.440520 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b2f2ae81-858e-49c7-9a13-00a35850e02d-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-ld5c2\" (UID: \"b2f2ae81-858e-49c7-9a13-00a35850e02d\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ld5c2" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.440007 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-metrics\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: E1213 06:57:40.440140 4644 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 13 06:57:40 crc kubenswrapper[4644]: E1213 06:57:40.440924 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-memberlist podName:72b3e06f-eeaa-4db7-a2dd-ec97404219bb nodeName:}" failed. No retries permitted until 2025-12-13 06:57:40.940893001 +0000 UTC m=+723.155843834 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-memberlist") pod "speaker-lzmmt" (UID: "72b3e06f-eeaa-4db7-a2dd-ec97404219bb") : secret "metallb-memberlist" not found Dec 13 06:57:40 crc kubenswrapper[4644]: E1213 06:57:40.440495 4644 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Dec 13 06:57:40 crc kubenswrapper[4644]: E1213 06:57:40.441090 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-metrics-certs podName:72b3e06f-eeaa-4db7-a2dd-ec97404219bb nodeName:}" failed. No retries permitted until 2025-12-13 06:57:40.941081113 +0000 UTC m=+723.156031947 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-metrics-certs") pod "speaker-lzmmt" (UID: "72b3e06f-eeaa-4db7-a2dd-ec97404219bb") : secret "speaker-certs-secret" not found Dec 13 06:57:40 crc kubenswrapper[4644]: E1213 06:57:40.440628 4644 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Dec 13 06:57:40 crc kubenswrapper[4644]: E1213 06:57:40.441232 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b2f2ae81-858e-49c7-9a13-00a35850e02d-cert podName:b2f2ae81-858e-49c7-9a13-00a35850e02d nodeName:}" failed. No retries permitted until 2025-12-13 06:57:40.941225646 +0000 UTC m=+723.156176479 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b2f2ae81-858e-49c7-9a13-00a35850e02d-cert") pod "frr-k8s-webhook-server-7784b6fcf-ld5c2" (UID: "b2f2ae81-858e-49c7-9a13-00a35850e02d") : secret "frr-k8s-webhook-server-cert" not found Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.441141 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-metallb-excludel2\") pod \"speaker-lzmmt\" (UID: \"72b3e06f-eeaa-4db7-a2dd-ec97404219bb\") " pod="metallb-system/speaker-lzmmt" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.440668 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxqlw\" (UniqueName: \"kubernetes.io/projected/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-kube-api-access-rxqlw\") pod \"speaker-lzmmt\" (UID: \"72b3e06f-eeaa-4db7-a2dd-ec97404219bb\") " pod="metallb-system/speaker-lzmmt" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.441547 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-frr-sockets\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.441983 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-frr-conf\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.442115 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3125ef9d-0678-4f6c-9080-6b08ac39744a-cert\") pod \"controller-5bddd4b946-55rq7\" (UID: \"3125ef9d-0678-4f6c-9080-6b08ac39744a\") " pod="metallb-system/controller-5bddd4b946-55rq7" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.442214 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rltxr\" (UniqueName: \"kubernetes.io/projected/b2f2ae81-858e-49c7-9a13-00a35850e02d-kube-api-access-rltxr\") pod \"frr-k8s-webhook-server-7784b6fcf-ld5c2\" (UID: \"b2f2ae81-858e-49c7-9a13-00a35850e02d\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ld5c2" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.442283 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3125ef9d-0678-4f6c-9080-6b08ac39744a-metrics-certs\") pod \"controller-5bddd4b946-55rq7\" (UID: \"3125ef9d-0678-4f6c-9080-6b08ac39744a\") " pod="metallb-system/controller-5bddd4b946-55rq7" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.442346 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-reloader\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.442454 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-frr-startup\") pod 
\"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.442537 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsrst\" (UniqueName: \"kubernetes.io/projected/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-kube-api-access-dsrst\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.442644 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c997z\" (UniqueName: \"kubernetes.io/projected/3125ef9d-0678-4f6c-9080-6b08ac39744a-kube-api-access-c997z\") pod \"controller-5bddd4b946-55rq7\" (UID: \"3125ef9d-0678-4f6c-9080-6b08ac39744a\") " pod="metallb-system/controller-5bddd4b946-55rq7" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.441949 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-frr-sockets\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.443679 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-frr-conf\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.444045 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-reloader\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.444657 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-frr-startup\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.447832 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-metrics-certs\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.458898 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsrst\" (UniqueName: \"kubernetes.io/projected/e4fe3a30-e94f-40c6-b734-1385b3f14d4f-kube-api-access-dsrst\") pod \"frr-k8s-n2vrv\" (UID: \"e4fe3a30-e94f-40c6-b734-1385b3f14d4f\") " pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.459646 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxqlw\" (UniqueName: \"kubernetes.io/projected/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-kube-api-access-rxqlw\") pod \"speaker-lzmmt\" (UID: \"72b3e06f-eeaa-4db7-a2dd-ec97404219bb\") " pod="metallb-system/speaker-lzmmt" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.460198 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rltxr\" 
(UniqueName: \"kubernetes.io/projected/b2f2ae81-858e-49c7-9a13-00a35850e02d-kube-api-access-rltxr\") pod \"frr-k8s-webhook-server-7784b6fcf-ld5c2\" (UID: \"b2f2ae81-858e-49c7-9a13-00a35850e02d\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ld5c2" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.526131 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.544894 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3125ef9d-0678-4f6c-9080-6b08ac39744a-cert\") pod \"controller-5bddd4b946-55rq7\" (UID: \"3125ef9d-0678-4f6c-9080-6b08ac39744a\") " pod="metallb-system/controller-5bddd4b946-55rq7" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.545038 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3125ef9d-0678-4f6c-9080-6b08ac39744a-metrics-certs\") pod \"controller-5bddd4b946-55rq7\" (UID: \"3125ef9d-0678-4f6c-9080-6b08ac39744a\") " pod="metallb-system/controller-5bddd4b946-55rq7" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.545144 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c997z\" (UniqueName: \"kubernetes.io/projected/3125ef9d-0678-4f6c-9080-6b08ac39744a-kube-api-access-c997z\") pod \"controller-5bddd4b946-55rq7\" (UID: \"3125ef9d-0678-4f6c-9080-6b08ac39744a\") " pod="metallb-system/controller-5bddd4b946-55rq7" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.546943 4644 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.547887 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3125ef9d-0678-4f6c-9080-6b08ac39744a-metrics-certs\") pod \"controller-5bddd4b946-55rq7\" (UID: \"3125ef9d-0678-4f6c-9080-6b08ac39744a\") " pod="metallb-system/controller-5bddd4b946-55rq7" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.558379 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c997z\" (UniqueName: \"kubernetes.io/projected/3125ef9d-0678-4f6c-9080-6b08ac39744a-kube-api-access-c997z\") pod \"controller-5bddd4b946-55rq7\" (UID: \"3125ef9d-0678-4f6c-9080-6b08ac39744a\") " pod="metallb-system/controller-5bddd4b946-55rq7" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.558995 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3125ef9d-0678-4f6c-9080-6b08ac39744a-cert\") pod \"controller-5bddd4b946-55rq7\" (UID: \"3125ef9d-0678-4f6c-9080-6b08ac39744a\") " pod="metallb-system/controller-5bddd4b946-55rq7" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.639637 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5bddd4b946-55rq7" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.798966 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-55rq7"] Dec 13 06:57:40 crc kubenswrapper[4644]: W1213 06:57:40.811698 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3125ef9d_0678_4f6c_9080_6b08ac39744a.slice/crio-2cafd1d5f887a6cb1764a0e4e736477964506034756c061db7ab3a27c13ba2bc WatchSource:0}: Error finding container 2cafd1d5f887a6cb1764a0e4e736477964506034756c061db7ab3a27c13ba2bc: Status 404 returned error can't find the container with id 2cafd1d5f887a6cb1764a0e4e736477964506034756c061db7ab3a27c13ba2bc Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.951316 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-memberlist\") pod \"speaker-lzmmt\" (UID: \"72b3e06f-eeaa-4db7-a2dd-ec97404219bb\") " pod="metallb-system/speaker-lzmmt" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.951389 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-metrics-certs\") pod \"speaker-lzmmt\" (UID: \"72b3e06f-eeaa-4db7-a2dd-ec97404219bb\") " pod="metallb-system/speaker-lzmmt" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.951412 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b2f2ae81-858e-49c7-9a13-00a35850e02d-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-ld5c2\" (UID: \"b2f2ae81-858e-49c7-9a13-00a35850e02d\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ld5c2" Dec 13 06:57:40 crc kubenswrapper[4644]: E1213 06:57:40.951555 4644 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 13 06:57:40 crc kubenswrapper[4644]: E1213 06:57:40.951659 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-memberlist podName:72b3e06f-eeaa-4db7-a2dd-ec97404219bb nodeName:}" failed. No retries permitted until 2025-12-13 06:57:41.951636422 +0000 UTC m=+724.166587245 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-memberlist") pod "speaker-lzmmt" (UID: "72b3e06f-eeaa-4db7-a2dd-ec97404219bb") : secret "metallb-memberlist" not found Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.957255 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-metrics-certs\") pod \"speaker-lzmmt\" (UID: \"72b3e06f-eeaa-4db7-a2dd-ec97404219bb\") " pod="metallb-system/speaker-lzmmt" Dec 13 06:57:40 crc kubenswrapper[4644]: I1213 06:57:40.957850 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b2f2ae81-858e-49c7-9a13-00a35850e02d-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-ld5c2\" (UID: \"b2f2ae81-858e-49c7-9a13-00a35850e02d\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ld5c2" Dec 13 06:57:41 crc kubenswrapper[4644]: I1213 06:57:41.079280 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-n2vrv" event={"ID":"e4fe3a30-e94f-40c6-b734-1385b3f14d4f","Type":"ContainerStarted","Data":"40cd6cc54f23dbf105bac15f9888b927309cf2afd95d14a7c4deda5eae11b653"} Dec 13 06:57:41 crc kubenswrapper[4644]: I1213 06:57:41.081392 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-55rq7" event={"ID":"3125ef9d-0678-4f6c-9080-6b08ac39744a","Type":"ContainerStarted","Data":"5a1a061fbdbab76a8779a378b4cf968f93f0ba5233e8caec22377b0209b93c47"} Dec 13 06:57:41 crc kubenswrapper[4644]: I1213 06:57:41.081536 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-55rq7" event={"ID":"3125ef9d-0678-4f6c-9080-6b08ac39744a","Type":"ContainerStarted","Data":"1d4a769f9ceb510fa53247093336eb52b8c6a99eacf4ce22c4bbcc69795a63d2"} Dec 13 06:57:41 crc kubenswrapper[4644]: I1213 06:57:41.081626 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5bddd4b946-55rq7" Dec 13 06:57:41 crc kubenswrapper[4644]: I1213 06:57:41.081705 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-55rq7" event={"ID":"3125ef9d-0678-4f6c-9080-6b08ac39744a","Type":"ContainerStarted","Data":"2cafd1d5f887a6cb1764a0e4e736477964506034756c061db7ab3a27c13ba2bc"} Dec 13 06:57:41 crc kubenswrapper[4644]: I1213 06:57:41.096369 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5bddd4b946-55rq7" podStartSLOduration=1.096348514 podStartE2EDuration="1.096348514s" podCreationTimestamp="2025-12-13 06:57:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:57:41.094427974 +0000 UTC m=+723.309378807" watchObservedRunningTime="2025-12-13 06:57:41.096348514 +0000 UTC m=+723.311299347" Dec 13 06:57:41 crc kubenswrapper[4644]: I1213 06:57:41.135957 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ld5c2" Dec 13 06:57:41 crc kubenswrapper[4644]: I1213 06:57:41.283271 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-ld5c2"] Dec 13 06:57:41 crc kubenswrapper[4644]: W1213 06:57:41.287132 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb2f2ae81_858e_49c7_9a13_00a35850e02d.slice/crio-883e2d371668a2df7269624da9c6fe02c512b6a5f5519c396bf78d3e6577b49b WatchSource:0}: Error finding container 883e2d371668a2df7269624da9c6fe02c512b6a5f5519c396bf78d3e6577b49b: Status 404 returned error can't find the container with id 883e2d371668a2df7269624da9c6fe02c512b6a5f5519c396bf78d3e6577b49b Dec 13 06:57:41 crc kubenswrapper[4644]: I1213 06:57:41.967989 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-memberlist\") pod \"speaker-lzmmt\" (UID: \"72b3e06f-eeaa-4db7-a2dd-ec97404219bb\") " pod="metallb-system/speaker-lzmmt" Dec 13 06:57:41 crc kubenswrapper[4644]: I1213 06:57:41.972545 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/72b3e06f-eeaa-4db7-a2dd-ec97404219bb-memberlist\") pod \"speaker-lzmmt\" (UID: \"72b3e06f-eeaa-4db7-a2dd-ec97404219bb\") " pod="metallb-system/speaker-lzmmt" Dec 13 06:57:42 crc kubenswrapper[4644]: I1213 06:57:42.096622 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ld5c2" event={"ID":"b2f2ae81-858e-49c7-9a13-00a35850e02d","Type":"ContainerStarted","Data":"883e2d371668a2df7269624da9c6fe02c512b6a5f5519c396bf78d3e6577b49b"} Dec 13 06:57:42 crc kubenswrapper[4644]: I1213 06:57:42.106524 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-lzmmt" Dec 13 06:57:43 crc kubenswrapper[4644]: I1213 06:57:43.128287 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-lzmmt" event={"ID":"72b3e06f-eeaa-4db7-a2dd-ec97404219bb","Type":"ContainerStarted","Data":"b25fe3ed2fabe96bef8654b81990aba7451139a2160e7bf166a800206c9b5448"} Dec 13 06:57:43 crc kubenswrapper[4644]: I1213 06:57:43.128334 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-lzmmt" event={"ID":"72b3e06f-eeaa-4db7-a2dd-ec97404219bb","Type":"ContainerStarted","Data":"b9f4ee6649748a81d73d3deb08dd97e7fc0985312759965a0ddd5c936da5f7cc"} Dec 13 06:57:43 crc kubenswrapper[4644]: I1213 06:57:43.128345 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-lzmmt" event={"ID":"72b3e06f-eeaa-4db7-a2dd-ec97404219bb","Type":"ContainerStarted","Data":"18ed910417ba2e6527e47e297eb407079a53851cfef5647a4b13c44737d39a13"} Dec 13 06:57:43 crc kubenswrapper[4644]: I1213 06:57:43.128858 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-lzmmt" Dec 13 06:57:43 crc kubenswrapper[4644]: I1213 06:57:43.146938 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-lzmmt" podStartSLOduration=3.146921078 podStartE2EDuration="3.146921078s" podCreationTimestamp="2025-12-13 06:57:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:57:43.143320009 +0000 UTC m=+725.358270852" watchObservedRunningTime="2025-12-13 06:57:43.146921078 +0000 UTC m=+725.361871911" Dec 13 06:57:47 crc kubenswrapper[4644]: I1213 06:57:47.156056 4644 generic.go:334] "Generic (PLEG): container finished" podID="e4fe3a30-e94f-40c6-b734-1385b3f14d4f" containerID="49f609317aec7866e629ca5bc9fb362d5be8d0756dbbd727344977009cbff2ff" exitCode=0 Dec 13 06:57:47 crc kubenswrapper[4644]: I1213 06:57:47.156202 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-n2vrv" event={"ID":"e4fe3a30-e94f-40c6-b734-1385b3f14d4f","Type":"ContainerDied","Data":"49f609317aec7866e629ca5bc9fb362d5be8d0756dbbd727344977009cbff2ff"} Dec 13 06:57:47 crc kubenswrapper[4644]: I1213 06:57:47.158920 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ld5c2" event={"ID":"b2f2ae81-858e-49c7-9a13-00a35850e02d","Type":"ContainerStarted","Data":"a94aca406e13b90b0db3f866b3941a5249fffd665989a2834049bb78818dba78"} Dec 13 06:57:47 crc kubenswrapper[4644]: I1213 06:57:47.159079 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ld5c2" Dec 13 06:57:47 crc kubenswrapper[4644]: I1213 06:57:47.189962 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ld5c2" podStartSLOduration=1.8665117420000001 podStartE2EDuration="7.18992857s" podCreationTimestamp="2025-12-13 06:57:40 +0000 UTC" firstStartedPulling="2025-12-13 06:57:41.289944755 +0000 UTC m=+723.504895588" lastFinishedPulling="2025-12-13 06:57:46.613361593 +0000 UTC m=+728.828312416" observedRunningTime="2025-12-13 06:57:47.18855618 +0000 UTC m=+729.403507014" watchObservedRunningTime="2025-12-13 06:57:47.18992857 +0000 UTC m=+729.404879403" Dec 13 06:57:48 crc kubenswrapper[4644]: I1213 06:57:48.166383 4644 generic.go:334] "Generic (PLEG): container finished" 
podID="e4fe3a30-e94f-40c6-b734-1385b3f14d4f" containerID="c8953dec00c6c67894e4ffb2152332f89b692ba6016835de6c672739591a3318" exitCode=0 Dec 13 06:57:48 crc kubenswrapper[4644]: I1213 06:57:48.166515 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-n2vrv" event={"ID":"e4fe3a30-e94f-40c6-b734-1385b3f14d4f","Type":"ContainerDied","Data":"c8953dec00c6c67894e4ffb2152332f89b692ba6016835de6c672739591a3318"} Dec 13 06:57:49 crc kubenswrapper[4644]: I1213 06:57:49.172492 4644 generic.go:334] "Generic (PLEG): container finished" podID="e4fe3a30-e94f-40c6-b734-1385b3f14d4f" containerID="336a331e800b8fcf174e4fa85351f6cbe3f0e6f28d78c3fedffb3aec639a54ae" exitCode=0 Dec 13 06:57:49 crc kubenswrapper[4644]: I1213 06:57:49.172537 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-n2vrv" event={"ID":"e4fe3a30-e94f-40c6-b734-1385b3f14d4f","Type":"ContainerDied","Data":"336a331e800b8fcf174e4fa85351f6cbe3f0e6f28d78c3fedffb3aec639a54ae"} Dec 13 06:57:50 crc kubenswrapper[4644]: I1213 06:57:50.183388 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-n2vrv" event={"ID":"e4fe3a30-e94f-40c6-b734-1385b3f14d4f","Type":"ContainerStarted","Data":"ad16e7d99f5ef31767175f36727335a88e2e4e23a32dad2d8ff47831edd5ca85"} Dec 13 06:57:50 crc kubenswrapper[4644]: I1213 06:57:50.184749 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-n2vrv" event={"ID":"e4fe3a30-e94f-40c6-b734-1385b3f14d4f","Type":"ContainerStarted","Data":"63cfafe05451203ddaa5c4e15745937f30f25bdc8994e7e339b1de51de820372"} Dec 13 06:57:50 crc kubenswrapper[4644]: I1213 06:57:50.184829 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:50 crc kubenswrapper[4644]: I1213 06:57:50.184965 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-n2vrv" event={"ID":"e4fe3a30-e94f-40c6-b734-1385b3f14d4f","Type":"ContainerStarted","Data":"34b781382d45cac4b7bdfea8f5ff7af56a752c1238e09bff2839e241d3071f5a"} Dec 13 06:57:50 crc kubenswrapper[4644]: I1213 06:57:50.184977 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-n2vrv" event={"ID":"e4fe3a30-e94f-40c6-b734-1385b3f14d4f","Type":"ContainerStarted","Data":"1046a29db76b05250992416df4a2b355c384b53e74a99b247c55d0a2519f95f7"} Dec 13 06:57:50 crc kubenswrapper[4644]: I1213 06:57:50.184987 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-n2vrv" event={"ID":"e4fe3a30-e94f-40c6-b734-1385b3f14d4f","Type":"ContainerStarted","Data":"b4ded16e7aa478532ffb752c999677839d03d66c789f5d82f3c8557789722f16"} Dec 13 06:57:50 crc kubenswrapper[4644]: I1213 06:57:50.184997 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-n2vrv" event={"ID":"e4fe3a30-e94f-40c6-b734-1385b3f14d4f","Type":"ContainerStarted","Data":"6a5a07a72f91ef5abe4c2db91e3c8388492edb4acbf15c3ddcd8fd42417f48c4"} Dec 13 06:57:50 crc kubenswrapper[4644]: I1213 06:57:50.208281 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-n2vrv" podStartSLOduration=4.249363274 podStartE2EDuration="10.208265093s" podCreationTimestamp="2025-12-13 06:57:40 +0000 UTC" firstStartedPulling="2025-12-13 06:57:40.649397469 +0000 UTC m=+722.864348301" lastFinishedPulling="2025-12-13 06:57:46.608299286 +0000 UTC m=+728.823250120" observedRunningTime="2025-12-13 06:57:50.204048767 +0000 UTC m=+732.418999600" 
watchObservedRunningTime="2025-12-13 06:57:50.208265093 +0000 UTC m=+732.423215926" Dec 13 06:57:50 crc kubenswrapper[4644]: I1213 06:57:50.526947 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:50 crc kubenswrapper[4644]: I1213 06:57:50.557162 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:57:50 crc kubenswrapper[4644]: I1213 06:57:50.644158 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5bddd4b946-55rq7" Dec 13 06:57:52 crc kubenswrapper[4644]: I1213 06:57:52.110224 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-lzmmt" Dec 13 06:57:57 crc kubenswrapper[4644]: I1213 06:57:57.969757 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-sphms"] Dec 13 06:57:57 crc kubenswrapper[4644]: I1213 06:57:57.970787 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-sphms" Dec 13 06:57:57 crc kubenswrapper[4644]: I1213 06:57:57.972859 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 13 06:57:57 crc kubenswrapper[4644]: I1213 06:57:57.973098 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-g6mfp" Dec 13 06:57:57 crc kubenswrapper[4644]: I1213 06:57:57.973207 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 13 06:57:57 crc kubenswrapper[4644]: I1213 06:57:57.978664 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-sphms"] Dec 13 06:57:58 crc kubenswrapper[4644]: I1213 06:57:58.116436 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bp8w6\" (UniqueName: \"kubernetes.io/projected/03f8f1aa-551a-4670-abee-795ece86f7f4-kube-api-access-bp8w6\") pod \"openstack-operator-index-sphms\" (UID: \"03f8f1aa-551a-4670-abee-795ece86f7f4\") " pod="openstack-operators/openstack-operator-index-sphms" Dec 13 06:57:58 crc kubenswrapper[4644]: I1213 06:57:58.218460 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bp8w6\" (UniqueName: \"kubernetes.io/projected/03f8f1aa-551a-4670-abee-795ece86f7f4-kube-api-access-bp8w6\") pod \"openstack-operator-index-sphms\" (UID: \"03f8f1aa-551a-4670-abee-795ece86f7f4\") " pod="openstack-operators/openstack-operator-index-sphms" Dec 13 06:57:58 crc kubenswrapper[4644]: I1213 06:57:58.235163 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bp8w6\" (UniqueName: \"kubernetes.io/projected/03f8f1aa-551a-4670-abee-795ece86f7f4-kube-api-access-bp8w6\") pod \"openstack-operator-index-sphms\" (UID: \"03f8f1aa-551a-4670-abee-795ece86f7f4\") " pod="openstack-operators/openstack-operator-index-sphms" Dec 13 06:57:58 crc kubenswrapper[4644]: I1213 06:57:58.288653 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-sphms" Dec 13 06:57:58 crc kubenswrapper[4644]: I1213 06:57:58.643412 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-sphms"] Dec 13 06:57:58 crc kubenswrapper[4644]: W1213 06:57:58.649336 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03f8f1aa_551a_4670_abee_795ece86f7f4.slice/crio-91a2f5e6959e96a083a071b0c0c8e3225ac38b398a3a56ede8f5883dcbc12d4d WatchSource:0}: Error finding container 91a2f5e6959e96a083a071b0c0c8e3225ac38b398a3a56ede8f5883dcbc12d4d: Status 404 returned error can't find the container with id 91a2f5e6959e96a083a071b0c0c8e3225ac38b398a3a56ede8f5883dcbc12d4d Dec 13 06:57:59 crc kubenswrapper[4644]: I1213 06:57:59.227189 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sphms" event={"ID":"03f8f1aa-551a-4670-abee-795ece86f7f4","Type":"ContainerStarted","Data":"91a2f5e6959e96a083a071b0c0c8e3225ac38b398a3a56ede8f5883dcbc12d4d"} Dec 13 06:58:00 crc kubenswrapper[4644]: I1213 06:58:00.234214 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sphms" event={"ID":"03f8f1aa-551a-4670-abee-795ece86f7f4","Type":"ContainerStarted","Data":"8bd961492748e9f054fb44587505098b36832fae1ab97ada61603518224c9f17"} Dec 13 06:58:00 crc kubenswrapper[4644]: I1213 06:58:00.249682 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-sphms" podStartSLOduration=1.835381773 podStartE2EDuration="3.249653624s" podCreationTimestamp="2025-12-13 06:57:57 +0000 UTC" firstStartedPulling="2025-12-13 06:57:58.650786393 +0000 UTC m=+740.865737216" lastFinishedPulling="2025-12-13 06:58:00.065058233 +0000 UTC m=+742.280009067" observedRunningTime="2025-12-13 06:58:00.247729977 +0000 UTC m=+742.462680811" watchObservedRunningTime="2025-12-13 06:58:00.249653624 +0000 UTC m=+742.464604458" Dec 13 06:58:00 crc kubenswrapper[4644]: I1213 06:58:00.528779 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-n2vrv" Dec 13 06:58:01 crc kubenswrapper[4644]: I1213 06:58:01.139708 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-ld5c2" Dec 13 06:58:03 crc kubenswrapper[4644]: I1213 06:58:03.164531 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-sphms"] Dec 13 06:58:03 crc kubenswrapper[4644]: I1213 06:58:03.164854 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-sphms" podUID="03f8f1aa-551a-4670-abee-795ece86f7f4" containerName="registry-server" containerID="cri-o://8bd961492748e9f054fb44587505098b36832fae1ab97ada61603518224c9f17" gracePeriod=2 Dec 13 06:58:03 crc kubenswrapper[4644]: I1213 06:58:03.484229 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-sphms" Dec 13 06:58:03 crc kubenswrapper[4644]: I1213 06:58:03.593146 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bp8w6\" (UniqueName: \"kubernetes.io/projected/03f8f1aa-551a-4670-abee-795ece86f7f4-kube-api-access-bp8w6\") pod \"03f8f1aa-551a-4670-abee-795ece86f7f4\" (UID: \"03f8f1aa-551a-4670-abee-795ece86f7f4\") " Dec 13 06:58:03 crc kubenswrapper[4644]: I1213 06:58:03.598723 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03f8f1aa-551a-4670-abee-795ece86f7f4-kube-api-access-bp8w6" (OuterVolumeSpecName: "kube-api-access-bp8w6") pod "03f8f1aa-551a-4670-abee-795ece86f7f4" (UID: "03f8f1aa-551a-4670-abee-795ece86f7f4"). InnerVolumeSpecName "kube-api-access-bp8w6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:58:03 crc kubenswrapper[4644]: I1213 06:58:03.694886 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bp8w6\" (UniqueName: \"kubernetes.io/projected/03f8f1aa-551a-4670-abee-795ece86f7f4-kube-api-access-bp8w6\") on node \"crc\" DevicePath \"\"" Dec 13 06:58:03 crc kubenswrapper[4644]: I1213 06:58:03.769249 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-2ktgc"] Dec 13 06:58:03 crc kubenswrapper[4644]: E1213 06:58:03.769709 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03f8f1aa-551a-4670-abee-795ece86f7f4" containerName="registry-server" Dec 13 06:58:03 crc kubenswrapper[4644]: I1213 06:58:03.769797 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="03f8f1aa-551a-4670-abee-795ece86f7f4" containerName="registry-server" Dec 13 06:58:03 crc kubenswrapper[4644]: I1213 06:58:03.769994 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="03f8f1aa-551a-4670-abee-795ece86f7f4" containerName="registry-server" Dec 13 06:58:03 crc kubenswrapper[4644]: I1213 06:58:03.770414 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2ktgc" Dec 13 06:58:03 crc kubenswrapper[4644]: I1213 06:58:03.776515 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2ktgc"] Dec 13 06:58:03 crc kubenswrapper[4644]: I1213 06:58:03.797182 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ll872\" (UniqueName: \"kubernetes.io/projected/8a8b2af3-ff75-4a0a-a3ec-6f1b90619082-kube-api-access-ll872\") pod \"openstack-operator-index-2ktgc\" (UID: \"8a8b2af3-ff75-4a0a-a3ec-6f1b90619082\") " pod="openstack-operators/openstack-operator-index-2ktgc" Dec 13 06:58:03 crc kubenswrapper[4644]: I1213 06:58:03.899002 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ll872\" (UniqueName: \"kubernetes.io/projected/8a8b2af3-ff75-4a0a-a3ec-6f1b90619082-kube-api-access-ll872\") pod \"openstack-operator-index-2ktgc\" (UID: \"8a8b2af3-ff75-4a0a-a3ec-6f1b90619082\") " pod="openstack-operators/openstack-operator-index-2ktgc" Dec 13 06:58:03 crc kubenswrapper[4644]: I1213 06:58:03.916124 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ll872\" (UniqueName: \"kubernetes.io/projected/8a8b2af3-ff75-4a0a-a3ec-6f1b90619082-kube-api-access-ll872\") pod \"openstack-operator-index-2ktgc\" (UID: \"8a8b2af3-ff75-4a0a-a3ec-6f1b90619082\") " pod="openstack-operators/openstack-operator-index-2ktgc" Dec 13 06:58:04 crc kubenswrapper[4644]: I1213 06:58:04.082885 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-2ktgc" Dec 13 06:58:04 crc kubenswrapper[4644]: I1213 06:58:04.256805 4644 generic.go:334] "Generic (PLEG): container finished" podID="03f8f1aa-551a-4670-abee-795ece86f7f4" containerID="8bd961492748e9f054fb44587505098b36832fae1ab97ada61603518224c9f17" exitCode=0 Dec 13 06:58:04 crc kubenswrapper[4644]: I1213 06:58:04.256849 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sphms" event={"ID":"03f8f1aa-551a-4670-abee-795ece86f7f4","Type":"ContainerDied","Data":"8bd961492748e9f054fb44587505098b36832fae1ab97ada61603518224c9f17"} Dec 13 06:58:04 crc kubenswrapper[4644]: I1213 06:58:04.256883 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-sphms" Dec 13 06:58:04 crc kubenswrapper[4644]: I1213 06:58:04.256900 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sphms" event={"ID":"03f8f1aa-551a-4670-abee-795ece86f7f4","Type":"ContainerDied","Data":"91a2f5e6959e96a083a071b0c0c8e3225ac38b398a3a56ede8f5883dcbc12d4d"} Dec 13 06:58:04 crc kubenswrapper[4644]: I1213 06:58:04.256921 4644 scope.go:117] "RemoveContainer" containerID="8bd961492748e9f054fb44587505098b36832fae1ab97ada61603518224c9f17" Dec 13 06:58:04 crc kubenswrapper[4644]: I1213 06:58:04.274816 4644 scope.go:117] "RemoveContainer" containerID="8bd961492748e9f054fb44587505098b36832fae1ab97ada61603518224c9f17" Dec 13 06:58:04 crc kubenswrapper[4644]: E1213 06:58:04.275269 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bd961492748e9f054fb44587505098b36832fae1ab97ada61603518224c9f17\": container with ID starting with 8bd961492748e9f054fb44587505098b36832fae1ab97ada61603518224c9f17 not found: ID does not exist" containerID="8bd961492748e9f054fb44587505098b36832fae1ab97ada61603518224c9f17" Dec 13 06:58:04 crc kubenswrapper[4644]: I1213 06:58:04.275300 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bd961492748e9f054fb44587505098b36832fae1ab97ada61603518224c9f17"} err="failed to get container status \"8bd961492748e9f054fb44587505098b36832fae1ab97ada61603518224c9f17\": rpc error: code = NotFound desc = could not find container \"8bd961492748e9f054fb44587505098b36832fae1ab97ada61603518224c9f17\": container with ID starting with 8bd961492748e9f054fb44587505098b36832fae1ab97ada61603518224c9f17 not found: ID does not exist" Dec 13 06:58:04 crc kubenswrapper[4644]: I1213 06:58:04.279629 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-sphms"] Dec 13 06:58:04 crc kubenswrapper[4644]: I1213 06:58:04.282407 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-sphms"] Dec 13 06:58:04 crc kubenswrapper[4644]: I1213 06:58:04.394996 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03f8f1aa-551a-4670-abee-795ece86f7f4" path="/var/lib/kubelet/pods/03f8f1aa-551a-4670-abee-795ece86f7f4/volumes" Dec 13 06:58:04 crc kubenswrapper[4644]: I1213 06:58:04.456776 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2ktgc"] Dec 13 06:58:04 crc kubenswrapper[4644]: W1213 06:58:04.460642 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a8b2af3_ff75_4a0a_a3ec_6f1b90619082.slice/crio-1a85f8031df8a4f4344c260f3bcff0f487e371165c3545775ebb3beb9300c516 WatchSource:0}: Error finding container 1a85f8031df8a4f4344c260f3bcff0f487e371165c3545775ebb3beb9300c516: Status 404 returned error can't find the container with id 1a85f8031df8a4f4344c260f3bcff0f487e371165c3545775ebb3beb9300c516 Dec 13 06:58:05 crc kubenswrapper[4644]: I1213 06:58:05.263765 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2ktgc" event={"ID":"8a8b2af3-ff75-4a0a-a3ec-6f1b90619082","Type":"ContainerStarted","Data":"b275b9d2f1fac7a195cbbd234b3982b17999a9bd6670d1821207851eba590c71"} Dec 13 06:58:05 crc kubenswrapper[4644]: I1213 06:58:05.264032 4644 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2ktgc" event={"ID":"8a8b2af3-ff75-4a0a-a3ec-6f1b90619082","Type":"ContainerStarted","Data":"1a85f8031df8a4f4344c260f3bcff0f487e371165c3545775ebb3beb9300c516"} Dec 13 06:58:05 crc kubenswrapper[4644]: I1213 06:58:05.286214 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-2ktgc" podStartSLOduration=1.633178733 podStartE2EDuration="2.286199349s" podCreationTimestamp="2025-12-13 06:58:03 +0000 UTC" firstStartedPulling="2025-12-13 06:58:04.464393534 +0000 UTC m=+746.679344368" lastFinishedPulling="2025-12-13 06:58:05.117414151 +0000 UTC m=+747.332364984" observedRunningTime="2025-12-13 06:58:05.28473208 +0000 UTC m=+747.499682913" watchObservedRunningTime="2025-12-13 06:58:05.286199349 +0000 UTC m=+747.501150182" Dec 13 06:58:05 crc kubenswrapper[4644]: I1213 06:58:05.741906 4644 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 13 06:58:09 crc kubenswrapper[4644]: I1213 06:58:09.754084 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 06:58:09 crc kubenswrapper[4644]: I1213 06:58:09.754157 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 06:58:09 crc kubenswrapper[4644]: I1213 06:58:09.754207 4644 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 06:58:09 crc kubenswrapper[4644]: I1213 06:58:09.754847 4644 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2487f8fbc172ccc82773d3da9a7aed4fb2a0c9cb73ab10d78d14719b9fd79f00"} pod="openshift-machine-config-operator/machine-config-daemon-45tj4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 06:58:09 crc kubenswrapper[4644]: I1213 06:58:09.754901 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" containerID="cri-o://2487f8fbc172ccc82773d3da9a7aed4fb2a0c9cb73ab10d78d14719b9fd79f00" gracePeriod=600 Dec 13 06:58:10 crc kubenswrapper[4644]: I1213 06:58:10.299147 4644 generic.go:334] "Generic (PLEG): container finished" podID="48240f19-087e-4597-b448-ab1a190a5027" containerID="2487f8fbc172ccc82773d3da9a7aed4fb2a0c9cb73ab10d78d14719b9fd79f00" exitCode=0 Dec 13 06:58:10 crc kubenswrapper[4644]: I1213 06:58:10.299247 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerDied","Data":"2487f8fbc172ccc82773d3da9a7aed4fb2a0c9cb73ab10d78d14719b9fd79f00"} Dec 13 06:58:10 crc kubenswrapper[4644]: I1213 06:58:10.299422 4644 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerStarted","Data":"5e6f928015ce5189200b70ce1217403ef17ccffc81e2b7876249680646ea25ba"} Dec 13 06:58:10 crc kubenswrapper[4644]: I1213 06:58:10.299475 4644 scope.go:117] "RemoveContainer" containerID="8a334c8ac9cfbcecc238f0daede728d1156a22a87c2dd064837832a2d8e79ebe" Dec 13 06:58:14 crc kubenswrapper[4644]: I1213 06:58:14.083127 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-2ktgc" Dec 13 06:58:14 crc kubenswrapper[4644]: I1213 06:58:14.083356 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-2ktgc" Dec 13 06:58:14 crc kubenswrapper[4644]: I1213 06:58:14.107979 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-2ktgc" Dec 13 06:58:14 crc kubenswrapper[4644]: I1213 06:58:14.344363 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-2ktgc" Dec 13 06:58:15 crc kubenswrapper[4644]: I1213 06:58:15.996609 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx"] Dec 13 06:58:15 crc kubenswrapper[4644]: I1213 06:58:15.997696 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" Dec 13 06:58:15 crc kubenswrapper[4644]: I1213 06:58:15.999486 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-5n9fn" Dec 13 06:58:16 crc kubenswrapper[4644]: I1213 06:58:16.004585 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx"] Dec 13 06:58:16 crc kubenswrapper[4644]: I1213 06:58:16.067457 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-util\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx\" (UID: \"69c15b9d-0c5d-472e-9a3f-b9b442ca557c\") " pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" Dec 13 06:58:16 crc kubenswrapper[4644]: I1213 06:58:16.067521 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-bundle\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx\" (UID: \"69c15b9d-0c5d-472e-9a3f-b9b442ca557c\") " pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" Dec 13 06:58:16 crc kubenswrapper[4644]: I1213 06:58:16.067608 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rx9f6\" (UniqueName: \"kubernetes.io/projected/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-kube-api-access-rx9f6\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx\" (UID: \"69c15b9d-0c5d-472e-9a3f-b9b442ca557c\") " pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" Dec 13 06:58:16 crc kubenswrapper[4644]: I1213 06:58:16.169227 4644 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-util\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx\" (UID: \"69c15b9d-0c5d-472e-9a3f-b9b442ca557c\") " pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" Dec 13 06:58:16 crc kubenswrapper[4644]: I1213 06:58:16.169290 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-bundle\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx\" (UID: \"69c15b9d-0c5d-472e-9a3f-b9b442ca557c\") " pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" Dec 13 06:58:16 crc kubenswrapper[4644]: I1213 06:58:16.169350 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rx9f6\" (UniqueName: \"kubernetes.io/projected/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-kube-api-access-rx9f6\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx\" (UID: \"69c15b9d-0c5d-472e-9a3f-b9b442ca557c\") " pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" Dec 13 06:58:16 crc kubenswrapper[4644]: I1213 06:58:16.169783 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-util\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx\" (UID: \"69c15b9d-0c5d-472e-9a3f-b9b442ca557c\") " pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" Dec 13 06:58:16 crc kubenswrapper[4644]: I1213 06:58:16.171250 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-bundle\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx\" (UID: \"69c15b9d-0c5d-472e-9a3f-b9b442ca557c\") " pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" Dec 13 06:58:16 crc kubenswrapper[4644]: I1213 06:58:16.186066 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rx9f6\" (UniqueName: \"kubernetes.io/projected/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-kube-api-access-rx9f6\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx\" (UID: \"69c15b9d-0c5d-472e-9a3f-b9b442ca557c\") " pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" Dec 13 06:58:16 crc kubenswrapper[4644]: I1213 06:58:16.314299 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" Dec 13 06:58:16 crc kubenswrapper[4644]: I1213 06:58:16.673197 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx"] Dec 13 06:58:17 crc kubenswrapper[4644]: I1213 06:58:17.339646 4644 generic.go:334] "Generic (PLEG): container finished" podID="69c15b9d-0c5d-472e-9a3f-b9b442ca557c" containerID="1d37ca69162c9cd2d4fa26b8a084c1548ffbe7f0816b2c7d00f6dba79910c2b5" exitCode=0 Dec 13 06:58:17 crc kubenswrapper[4644]: I1213 06:58:17.339689 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" event={"ID":"69c15b9d-0c5d-472e-9a3f-b9b442ca557c","Type":"ContainerDied","Data":"1d37ca69162c9cd2d4fa26b8a084c1548ffbe7f0816b2c7d00f6dba79910c2b5"} Dec 13 06:58:17 crc kubenswrapper[4644]: I1213 06:58:17.339716 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" event={"ID":"69c15b9d-0c5d-472e-9a3f-b9b442ca557c","Type":"ContainerStarted","Data":"b571115594a39aaaede1df0425d34b55cdc309d6610045279235e15fc26edb15"} Dec 13 06:58:18 crc kubenswrapper[4644]: I1213 06:58:18.348748 4644 generic.go:334] "Generic (PLEG): container finished" podID="69c15b9d-0c5d-472e-9a3f-b9b442ca557c" containerID="02fc8e15651cdb64c169ccf962dbb1afe531861be5dc5f1f90d48bf2dbb9cc96" exitCode=0 Dec 13 06:58:18 crc kubenswrapper[4644]: I1213 06:58:18.348830 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" event={"ID":"69c15b9d-0c5d-472e-9a3f-b9b442ca557c","Type":"ContainerDied","Data":"02fc8e15651cdb64c169ccf962dbb1afe531861be5dc5f1f90d48bf2dbb9cc96"} Dec 13 06:58:19 crc kubenswrapper[4644]: I1213 06:58:19.356015 4644 generic.go:334] "Generic (PLEG): container finished" podID="69c15b9d-0c5d-472e-9a3f-b9b442ca557c" containerID="ff2a790534b05bf86adadd4bf2129f9d6d38046b16261bfe6f1c73c253bf1014" exitCode=0 Dec 13 06:58:19 crc kubenswrapper[4644]: I1213 06:58:19.356096 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" event={"ID":"69c15b9d-0c5d-472e-9a3f-b9b442ca557c","Type":"ContainerDied","Data":"ff2a790534b05bf86adadd4bf2129f9d6d38046b16261bfe6f1c73c253bf1014"} Dec 13 06:58:20 crc kubenswrapper[4644]: I1213 06:58:20.569020 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" Dec 13 06:58:20 crc kubenswrapper[4644]: I1213 06:58:20.632473 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-bundle\") pod \"69c15b9d-0c5d-472e-9a3f-b9b442ca557c\" (UID: \"69c15b9d-0c5d-472e-9a3f-b9b442ca557c\") " Dec 13 06:58:20 crc kubenswrapper[4644]: I1213 06:58:20.632556 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rx9f6\" (UniqueName: \"kubernetes.io/projected/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-kube-api-access-rx9f6\") pod \"69c15b9d-0c5d-472e-9a3f-b9b442ca557c\" (UID: \"69c15b9d-0c5d-472e-9a3f-b9b442ca557c\") " Dec 13 06:58:20 crc kubenswrapper[4644]: I1213 06:58:20.632593 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-util\") pod \"69c15b9d-0c5d-472e-9a3f-b9b442ca557c\" (UID: \"69c15b9d-0c5d-472e-9a3f-b9b442ca557c\") " Dec 13 06:58:20 crc kubenswrapper[4644]: I1213 06:58:20.633344 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-bundle" (OuterVolumeSpecName: "bundle") pod "69c15b9d-0c5d-472e-9a3f-b9b442ca557c" (UID: "69c15b9d-0c5d-472e-9a3f-b9b442ca557c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:58:20 crc kubenswrapper[4644]: I1213 06:58:20.638137 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-kube-api-access-rx9f6" (OuterVolumeSpecName: "kube-api-access-rx9f6") pod "69c15b9d-0c5d-472e-9a3f-b9b442ca557c" (UID: "69c15b9d-0c5d-472e-9a3f-b9b442ca557c"). InnerVolumeSpecName "kube-api-access-rx9f6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 06:58:20 crc kubenswrapper[4644]: I1213 06:58:20.643190 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-util" (OuterVolumeSpecName: "util") pod "69c15b9d-0c5d-472e-9a3f-b9b442ca557c" (UID: "69c15b9d-0c5d-472e-9a3f-b9b442ca557c"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 06:58:20 crc kubenswrapper[4644]: I1213 06:58:20.734585 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rx9f6\" (UniqueName: \"kubernetes.io/projected/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-kube-api-access-rx9f6\") on node \"crc\" DevicePath \"\"" Dec 13 06:58:20 crc kubenswrapper[4644]: I1213 06:58:20.734615 4644 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-util\") on node \"crc\" DevicePath \"\"" Dec 13 06:58:20 crc kubenswrapper[4644]: I1213 06:58:20.734643 4644 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/69c15b9d-0c5d-472e-9a3f-b9b442ca557c-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 06:58:21 crc kubenswrapper[4644]: I1213 06:58:21.367349 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" event={"ID":"69c15b9d-0c5d-472e-9a3f-b9b442ca557c","Type":"ContainerDied","Data":"b571115594a39aaaede1df0425d34b55cdc309d6610045279235e15fc26edb15"} Dec 13 06:58:21 crc kubenswrapper[4644]: I1213 06:58:21.367637 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b571115594a39aaaede1df0425d34b55cdc309d6610045279235e15fc26edb15" Dec 13 06:58:21 crc kubenswrapper[4644]: I1213 06:58:21.367757 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx" Dec 13 06:58:28 crc kubenswrapper[4644]: I1213 06:58:28.182099 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4"] Dec 13 06:58:28 crc kubenswrapper[4644]: E1213 06:58:28.182714 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69c15b9d-0c5d-472e-9a3f-b9b442ca557c" containerName="extract" Dec 13 06:58:28 crc kubenswrapper[4644]: I1213 06:58:28.182725 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="69c15b9d-0c5d-472e-9a3f-b9b442ca557c" containerName="extract" Dec 13 06:58:28 crc kubenswrapper[4644]: E1213 06:58:28.182735 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69c15b9d-0c5d-472e-9a3f-b9b442ca557c" containerName="pull" Dec 13 06:58:28 crc kubenswrapper[4644]: I1213 06:58:28.182740 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="69c15b9d-0c5d-472e-9a3f-b9b442ca557c" containerName="pull" Dec 13 06:58:28 crc kubenswrapper[4644]: E1213 06:58:28.182758 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69c15b9d-0c5d-472e-9a3f-b9b442ca557c" containerName="util" Dec 13 06:58:28 crc kubenswrapper[4644]: I1213 06:58:28.182764 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="69c15b9d-0c5d-472e-9a3f-b9b442ca557c" containerName="util" Dec 13 06:58:28 crc kubenswrapper[4644]: I1213 06:58:28.182850 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="69c15b9d-0c5d-472e-9a3f-b9b442ca557c" containerName="extract" Dec 13 06:58:28 crc kubenswrapper[4644]: I1213 06:58:28.183203 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4" Dec 13 06:58:28 crc kubenswrapper[4644]: I1213 06:58:28.185651 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-hp5tj" Dec 13 06:58:28 crc kubenswrapper[4644]: I1213 06:58:28.242729 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbxf8\" (UniqueName: \"kubernetes.io/projected/6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2-kube-api-access-lbxf8\") pod \"openstack-operator-controller-operator-57bbbf4567-2s5l4\" (UID: \"6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2\") " pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4" Dec 13 06:58:28 crc kubenswrapper[4644]: I1213 06:58:28.248201 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4"] Dec 13 06:58:28 crc kubenswrapper[4644]: I1213 06:58:28.343532 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbxf8\" (UniqueName: \"kubernetes.io/projected/6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2-kube-api-access-lbxf8\") pod \"openstack-operator-controller-operator-57bbbf4567-2s5l4\" (UID: \"6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2\") " pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4" Dec 13 06:58:28 crc kubenswrapper[4644]: I1213 06:58:28.361662 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbxf8\" (UniqueName: \"kubernetes.io/projected/6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2-kube-api-access-lbxf8\") pod \"openstack-operator-controller-operator-57bbbf4567-2s5l4\" (UID: \"6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2\") " pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4" Dec 13 06:58:28 crc kubenswrapper[4644]: I1213 06:58:28.497040 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4" Dec 13 06:58:28 crc kubenswrapper[4644]: I1213 06:58:28.859318 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4"] Dec 13 06:58:28 crc kubenswrapper[4644]: W1213 06:58:28.862872 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6a7ad2fa_50e8_4a33_8b81_96d9fdc315b2.slice/crio-64c35f8fb4909c8acb7b5f776aec7f76ed6e9fe0a317e0930b76ce47c6954926 WatchSource:0}: Error finding container 64c35f8fb4909c8acb7b5f776aec7f76ed6e9fe0a317e0930b76ce47c6954926: Status 404 returned error can't find the container with id 64c35f8fb4909c8acb7b5f776aec7f76ed6e9fe0a317e0930b76ce47c6954926 Dec 13 06:58:29 crc kubenswrapper[4644]: I1213 06:58:29.412327 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4" event={"ID":"6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2","Type":"ContainerStarted","Data":"64c35f8fb4909c8acb7b5f776aec7f76ed6e9fe0a317e0930b76ce47c6954926"} Dec 13 06:58:33 crc kubenswrapper[4644]: I1213 06:58:33.435417 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4" event={"ID":"6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2","Type":"ContainerStarted","Data":"8300d463a6e0016f6d9f254db89768683d663200470b9bb5e5fb784b997bdb7a"} Dec 13 06:58:33 crc kubenswrapper[4644]: I1213 06:58:33.436581 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4" Dec 13 06:58:33 crc kubenswrapper[4644]: I1213 06:58:33.463092 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4" podStartSLOduration=1.990007436 podStartE2EDuration="5.463071242s" podCreationTimestamp="2025-12-13 06:58:28 +0000 UTC" firstStartedPulling="2025-12-13 06:58:28.866000699 +0000 UTC m=+771.080951522" lastFinishedPulling="2025-12-13 06:58:32.339064495 +0000 UTC m=+774.554015328" observedRunningTime="2025-12-13 06:58:33.459782028 +0000 UTC m=+775.674732861" watchObservedRunningTime="2025-12-13 06:58:33.463071242 +0000 UTC m=+775.678022075" Dec 13 06:58:38 crc kubenswrapper[4644]: I1213 06:58:38.501174 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.178715 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-95949466-9ffgl"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.179859 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-95949466-9ffgl" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.182631 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-bf9km" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.190598 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5cf45c46bd-tndds"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.191585 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-tndds" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.194111 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-m76tf" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.194164 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-95949466-9ffgl"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.197118 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-66f8b87655-h6nxs"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.197933 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-h6nxs" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.202277 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-5dm4f" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.207555 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5cf45c46bd-tndds"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.229533 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-767f9d7567-z2xnf"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.230429 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-767f9d7567-z2xnf" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.232500 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-v66gt" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.244821 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6ccf486b9-zmcbq"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.245705 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-zmcbq" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.250094 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-59b8dcb766-9m7mp"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.254999 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-s9gcw" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.257624 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-9m7mp" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.261536 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-pg277" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.264162 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-767f9d7567-z2xnf"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.289869 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8zz6\" (UniqueName: \"kubernetes.io/projected/3ec726b0-e1c1-497a-9364-f483cdf9b69b-kube-api-access-s8zz6\") pod \"barbican-operator-controller-manager-95949466-9ffgl\" (UID: \"3ec726b0-e1c1-497a-9364-f483cdf9b69b\") " pod="openstack-operators/barbican-operator-controller-manager-95949466-9ffgl" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.289926 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9q42t\" (UniqueName: \"kubernetes.io/projected/11c2a2ff-6f82-4b30-909b-f0f8c1e92394-kube-api-access-9q42t\") pod \"cinder-operator-controller-manager-5cf45c46bd-tndds\" (UID: \"11c2a2ff-6f82-4b30-909b-f0f8c1e92394\") " pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-tndds" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.296508 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-59b8dcb766-9m7mp"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.306466 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6ccf486b9-zmcbq"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.309708 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-58944d7758-4p77w"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.310534 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.312548 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.320520 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-7lwpr" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.327528 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-f458558d7-fhckm"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.328400 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-f458558d7-fhckm" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.331850 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-hs8sk" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.335081 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-58944d7758-4p77w"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.342637 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-66f8b87655-h6nxs"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.357405 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-f458558d7-fhckm"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.377256 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5c7cbf548f-jfdpn"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.378734 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-jfdpn" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.385826 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-t72bx" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.398521 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8zz6\" (UniqueName: \"kubernetes.io/projected/3ec726b0-e1c1-497a-9364-f483cdf9b69b-kube-api-access-s8zz6\") pod \"barbican-operator-controller-manager-95949466-9ffgl\" (UID: \"3ec726b0-e1c1-497a-9364-f483cdf9b69b\") " pod="openstack-operators/barbican-operator-controller-manager-95949466-9ffgl" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.398591 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9q42t\" (UniqueName: \"kubernetes.io/projected/11c2a2ff-6f82-4b30-909b-f0f8c1e92394-kube-api-access-9q42t\") pod \"cinder-operator-controller-manager-5cf45c46bd-tndds\" (UID: \"11c2a2ff-6f82-4b30-909b-f0f8c1e92394\") " pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-tndds" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.398633 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82lrw\" (UniqueName: \"kubernetes.io/projected/32a38d48-fe84-4ede-860c-ae76de27cbe6-kube-api-access-82lrw\") pod \"designate-operator-controller-manager-66f8b87655-h6nxs\" (UID: \"32a38d48-fe84-4ede-860c-ae76de27cbe6\") " pod="openstack-operators/designate-operator-controller-manager-66f8b87655-h6nxs" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.398732 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4m5v\" (UniqueName: \"kubernetes.io/projected/fd6f17a4-40cc-4465-8c67-58c67230344d-kube-api-access-j4m5v\") pod \"glance-operator-controller-manager-767f9d7567-z2xnf\" (UID: \"fd6f17a4-40cc-4465-8c67-58c67230344d\") " pod="openstack-operators/glance-operator-controller-manager-767f9d7567-z2xnf" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.398749 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"kube-api-access-4v444\" (UniqueName: \"kubernetes.io/projected/aac5283b-a0c7-4cac-8a72-07ca5444b743-kube-api-access-4v444\") pod \"horizon-operator-controller-manager-6ccf486b9-zmcbq\" (UID: \"aac5283b-a0c7-4cac-8a72-07ca5444b743\") " pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-zmcbq" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.398793 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mk54\" (UniqueName: \"kubernetes.io/projected/98e02ffd-3d31-4b00-8bc7-5f225cdf9fc5-kube-api-access-8mk54\") pod \"heat-operator-controller-manager-59b8dcb766-9m7mp\" (UID: \"98e02ffd-3d31-4b00-8bc7-5f225cdf9fc5\") " pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-9m7mp" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.403909 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5c7cbf548f-jfdpn"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.414990 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-5fdd9786f7-58rgg"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.415971 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-58rgg" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.420079 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-6cc58" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.420096 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-f76f4954c-cmcbp"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.421162 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-cmcbp" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.425312 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-f76f4954c-cmcbp"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.428469 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-crnvv" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.434382 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8zz6\" (UniqueName: \"kubernetes.io/projected/3ec726b0-e1c1-497a-9364-f483cdf9b69b-kube-api-access-s8zz6\") pod \"barbican-operator-controller-manager-95949466-9ffgl\" (UID: \"3ec726b0-e1c1-497a-9364-f483cdf9b69b\") " pod="openstack-operators/barbican-operator-controller-manager-95949466-9ffgl" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.436158 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9q42t\" (UniqueName: \"kubernetes.io/projected/11c2a2ff-6f82-4b30-909b-f0f8c1e92394-kube-api-access-9q42t\") pod \"cinder-operator-controller-manager-5cf45c46bd-tndds\" (UID: \"11c2a2ff-6f82-4b30-909b-f0f8c1e92394\") " pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-tndds" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.460147 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5fdd9786f7-58rgg"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.464151 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7cd87b778f-vgdnd"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.465132 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-vgdnd" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.471918 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-ghtqd" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.498237 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7cd87b778f-vgdnd"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.498636 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-95949466-9ffgl" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.500734 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mk54\" (UniqueName: \"kubernetes.io/projected/98e02ffd-3d31-4b00-8bc7-5f225cdf9fc5-kube-api-access-8mk54\") pod \"heat-operator-controller-manager-59b8dcb766-9m7mp\" (UID: \"98e02ffd-3d31-4b00-8bc7-5f225cdf9fc5\") " pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-9m7mp" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.500824 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zsdx\" (UniqueName: \"kubernetes.io/projected/b8c3ef08-66ae-474e-8204-2338afb7d08d-kube-api-access-4zsdx\") pod \"infra-operator-controller-manager-58944d7758-4p77w\" (UID: \"b8c3ef08-66ae-474e-8204-2338afb7d08d\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.500847 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82lrw\" (UniqueName: \"kubernetes.io/projected/32a38d48-fe84-4ede-860c-ae76de27cbe6-kube-api-access-82lrw\") pod \"designate-operator-controller-manager-66f8b87655-h6nxs\" (UID: \"32a38d48-fe84-4ede-860c-ae76de27cbe6\") " pod="openstack-operators/designate-operator-controller-manager-66f8b87655-h6nxs" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.500862 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25qj6\" (UniqueName: \"kubernetes.io/projected/ac41b645-ea22-42ac-846e-fa16d0beaee4-kube-api-access-25qj6\") pod \"ironic-operator-controller-manager-f458558d7-fhckm\" (UID: \"ac41b645-ea22-42ac-846e-fa16d0beaee4\") " pod="openstack-operators/ironic-operator-controller-manager-f458558d7-fhckm" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.500907 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert\") pod \"infra-operator-controller-manager-58944d7758-4p77w\" (UID: \"b8c3ef08-66ae-474e-8204-2338afb7d08d\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.500925 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvdml\" (UniqueName: \"kubernetes.io/projected/5e5582c5-50c3-4c4f-9693-16f2a71543ce-kube-api-access-pvdml\") pod \"keystone-operator-controller-manager-5c7cbf548f-jfdpn\" (UID: \"5e5582c5-50c3-4c4f-9693-16f2a71543ce\") " pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-jfdpn" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.500950 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4m5v\" (UniqueName: \"kubernetes.io/projected/fd6f17a4-40cc-4465-8c67-58c67230344d-kube-api-access-j4m5v\") pod \"glance-operator-controller-manager-767f9d7567-z2xnf\" (UID: \"fd6f17a4-40cc-4465-8c67-58c67230344d\") " pod="openstack-operators/glance-operator-controller-manager-767f9d7567-z2xnf" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.500967 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4v444\" (UniqueName: 
\"kubernetes.io/projected/aac5283b-a0c7-4cac-8a72-07ca5444b743-kube-api-access-4v444\") pod \"horizon-operator-controller-manager-6ccf486b9-zmcbq\" (UID: \"aac5283b-a0c7-4cac-8a72-07ca5444b743\") " pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-zmcbq" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.504272 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-464zb"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.505275 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-464zb" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.512692 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-s8d8k" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.512998 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-tndds" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.515901 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-464zb"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.533218 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-68c649d9d-k2fqd"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.534164 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-k2fqd" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.538849 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-xprfr" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.538963 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82lrw\" (UniqueName: \"kubernetes.io/projected/32a38d48-fe84-4ede-860c-ae76de27cbe6-kube-api-access-82lrw\") pod \"designate-operator-controller-manager-66f8b87655-h6nxs\" (UID: \"32a38d48-fe84-4ede-860c-ae76de27cbe6\") " pod="openstack-operators/designate-operator-controller-manager-66f8b87655-h6nxs" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.544119 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4v444\" (UniqueName: \"kubernetes.io/projected/aac5283b-a0c7-4cac-8a72-07ca5444b743-kube-api-access-4v444\") pod \"horizon-operator-controller-manager-6ccf486b9-zmcbq\" (UID: \"aac5283b-a0c7-4cac-8a72-07ca5444b743\") " pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-zmcbq" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.544704 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mk54\" (UniqueName: \"kubernetes.io/projected/98e02ffd-3d31-4b00-8bc7-5f225cdf9fc5-kube-api-access-8mk54\") pod \"heat-operator-controller-manager-59b8dcb766-9m7mp\" (UID: \"98e02ffd-3d31-4b00-8bc7-5f225cdf9fc5\") " pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-9m7mp" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.565068 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4m5v\" (UniqueName: \"kubernetes.io/projected/fd6f17a4-40cc-4465-8c67-58c67230344d-kube-api-access-j4m5v\") pod 
\"glance-operator-controller-manager-767f9d7567-z2xnf\" (UID: \"fd6f17a4-40cc-4465-8c67-58c67230344d\") " pod="openstack-operators/glance-operator-controller-manager-767f9d7567-z2xnf" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.576430 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-zmcbq" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.589770 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-9m7mp" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.605420 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-68c649d9d-k2fqd"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.606016 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8lms\" (UniqueName: \"kubernetes.io/projected/42b2a1fb-b5d1-46ff-932e-d831b53febf7-kube-api-access-x8lms\") pod \"manila-operator-controller-manager-5fdd9786f7-58rgg\" (UID: \"42b2a1fb-b5d1-46ff-932e-d831b53febf7\") " pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-58rgg" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.606083 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zsdx\" (UniqueName: \"kubernetes.io/projected/b8c3ef08-66ae-474e-8204-2338afb7d08d-kube-api-access-4zsdx\") pod \"infra-operator-controller-manager-58944d7758-4p77w\" (UID: \"b8c3ef08-66ae-474e-8204-2338afb7d08d\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.606110 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vh9gc\" (UniqueName: \"kubernetes.io/projected/d143ef34-f1db-411d-941b-c229888e22b2-kube-api-access-vh9gc\") pod \"neutron-operator-controller-manager-7cd87b778f-vgdnd\" (UID: \"d143ef34-f1db-411d-941b-c229888e22b2\") " pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-vgdnd" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.606132 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25qj6\" (UniqueName: \"kubernetes.io/projected/ac41b645-ea22-42ac-846e-fa16d0beaee4-kube-api-access-25qj6\") pod \"ironic-operator-controller-manager-f458558d7-fhckm\" (UID: \"ac41b645-ea22-42ac-846e-fa16d0beaee4\") " pod="openstack-operators/ironic-operator-controller-manager-f458558d7-fhckm" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.606157 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72fz9\" (UniqueName: \"kubernetes.io/projected/90ea237e-4f56-4008-a2df-d3c404424374-kube-api-access-72fz9\") pod \"mariadb-operator-controller-manager-f76f4954c-cmcbp\" (UID: \"90ea237e-4f56-4008-a2df-d3c404424374\") " pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-cmcbp" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.606177 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert\") pod \"infra-operator-controller-manager-58944d7758-4p77w\" (UID: \"b8c3ef08-66ae-474e-8204-2338afb7d08d\") " 
pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.606195 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvdml\" (UniqueName: \"kubernetes.io/projected/5e5582c5-50c3-4c4f-9693-16f2a71543ce-kube-api-access-pvdml\") pod \"keystone-operator-controller-manager-5c7cbf548f-jfdpn\" (UID: \"5e5582c5-50c3-4c4f-9693-16f2a71543ce\") " pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-jfdpn" Dec 13 06:59:15 crc kubenswrapper[4644]: E1213 06:59:15.606865 4644 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 13 06:59:15 crc kubenswrapper[4644]: E1213 06:59:15.606920 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert podName:b8c3ef08-66ae-474e-8204-2338afb7d08d nodeName:}" failed. No retries permitted until 2025-12-13 06:59:16.106900588 +0000 UTC m=+818.321851421 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert") pod "infra-operator-controller-manager-58944d7758-4p77w" (UID: "b8c3ef08-66ae-474e-8204-2338afb7d08d") : secret "infra-operator-webhook-server-cert" not found Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.607604 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.608531 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.616500 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-bf6d4f946-mtbhx"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.616989 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-q8l7h" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.617199 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.617801 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-mtbhx" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.621497 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-8fxr2" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.624417 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-8665b56d78-kfnbp"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.625291 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-kfnbp" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.635257 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-7hppm" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.637263 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zsdx\" (UniqueName: \"kubernetes.io/projected/b8c3ef08-66ae-474e-8204-2338afb7d08d-kube-api-access-4zsdx\") pod \"infra-operator-controller-manager-58944d7758-4p77w\" (UID: \"b8c3ef08-66ae-474e-8204-2338afb7d08d\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.637302 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25qj6\" (UniqueName: \"kubernetes.io/projected/ac41b645-ea22-42ac-846e-fa16d0beaee4-kube-api-access-25qj6\") pod \"ironic-operator-controller-manager-f458558d7-fhckm\" (UID: \"ac41b645-ea22-42ac-846e-fa16d0beaee4\") " pod="openstack-operators/ironic-operator-controller-manager-f458558d7-fhckm" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.641526 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.643341 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvdml\" (UniqueName: \"kubernetes.io/projected/5e5582c5-50c3-4c4f-9693-16f2a71543ce-kube-api-access-pvdml\") pod \"keystone-operator-controller-manager-5c7cbf548f-jfdpn\" (UID: \"5e5582c5-50c3-4c4f-9693-16f2a71543ce\") " pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-jfdpn" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.648386 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-f458558d7-fhckm" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.650118 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-bf6d4f946-mtbhx"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.656973 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-8665b56d78-kfnbp"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.662121 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5c6df8f9-bvc6f"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.663295 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-bvc6f" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.666080 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-zxk9q" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.692514 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5c6df8f9-bvc6f"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.702736 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-jfdpn" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.707076 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vh9gc\" (UniqueName: \"kubernetes.io/projected/d143ef34-f1db-411d-941b-c229888e22b2-kube-api-access-vh9gc\") pod \"neutron-operator-controller-manager-7cd87b778f-vgdnd\" (UID: \"d143ef34-f1db-411d-941b-c229888e22b2\") " pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-vgdnd" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.707142 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h6wp\" (UniqueName: \"kubernetes.io/projected/d86f3cba-c9ef-47eb-b04e-8f10ac1b0734-kube-api-access-4h6wp\") pod \"octavia-operator-controller-manager-68c649d9d-k2fqd\" (UID: \"d86f3cba-c9ef-47eb-b04e-8f10ac1b0734\") " pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-k2fqd" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.707182 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72fz9\" (UniqueName: \"kubernetes.io/projected/90ea237e-4f56-4008-a2df-d3c404424374-kube-api-access-72fz9\") pod \"mariadb-operator-controller-manager-f76f4954c-cmcbp\" (UID: \"90ea237e-4f56-4008-a2df-d3c404424374\") " pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-cmcbp" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.707284 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8lms\" (UniqueName: \"kubernetes.io/projected/42b2a1fb-b5d1-46ff-932e-d831b53febf7-kube-api-access-x8lms\") pod \"manila-operator-controller-manager-5fdd9786f7-58rgg\" (UID: \"42b2a1fb-b5d1-46ff-932e-d831b53febf7\") " pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-58rgg" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.707315 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjq7g\" (UniqueName: \"kubernetes.io/projected/08cf1d52-d8b7-477f-92c7-1dd2732ff9e3-kube-api-access-qjq7g\") pod \"nova-operator-controller-manager-5fbbf8b6cc-464zb\" (UID: \"08cf1d52-d8b7-477f-92c7-1dd2732ff9e3\") " pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-464zb" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.724511 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-97d456b9-fqsfn"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.725848 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-fqsfn" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.734924 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8lms\" (UniqueName: \"kubernetes.io/projected/42b2a1fb-b5d1-46ff-932e-d831b53febf7-kube-api-access-x8lms\") pod \"manila-operator-controller-manager-5fdd9786f7-58rgg\" (UID: \"42b2a1fb-b5d1-46ff-932e-d831b53febf7\") " pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-58rgg" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.737768 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-nmcvv" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.740851 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-97d456b9-fqsfn"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.755564 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72fz9\" (UniqueName: \"kubernetes.io/projected/90ea237e-4f56-4008-a2df-d3c404424374-kube-api-access-72fz9\") pod \"mariadb-operator-controller-manager-f76f4954c-cmcbp\" (UID: \"90ea237e-4f56-4008-a2df-d3c404424374\") " pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-cmcbp" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.767209 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vh9gc\" (UniqueName: \"kubernetes.io/projected/d143ef34-f1db-411d-941b-c229888e22b2-kube-api-access-vh9gc\") pod \"neutron-operator-controller-manager-7cd87b778f-vgdnd\" (UID: \"d143ef34-f1db-411d-941b-c229888e22b2\") " pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-vgdnd" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.777781 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-756ccf86c7-46n8m"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.783050 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-58rgg" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.783892 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-cmcbp" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.786624 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-756ccf86c7-46n8m" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.789155 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-5p78g" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.791418 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-vgdnd" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.814375 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcsn7\" (UniqueName: \"kubernetes.io/projected/9f650b3c-af01-4ce4-a702-daab8d5affc5-kube-api-access-fcsn7\") pod \"ovn-operator-controller-manager-bf6d4f946-mtbhx\" (UID: \"9f650b3c-af01-4ce4-a702-daab8d5affc5\") " pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-mtbhx" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.814535 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert\") pod \"openstack-baremetal-operator-controller-manager-689f887b544qprx\" (UID: \"7cd61a98-cc77-41b1-a06f-912207565b37\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.814629 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rphhh\" (UniqueName: \"kubernetes.io/projected/2e6fefac-bf85-4f28-a30d-808e83a13141-kube-api-access-rphhh\") pod \"placement-operator-controller-manager-8665b56d78-kfnbp\" (UID: \"2e6fefac-bf85-4f28-a30d-808e83a13141\") " pod="openstack-operators/placement-operator-controller-manager-8665b56d78-kfnbp" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.814753 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fd75c\" (UniqueName: \"kubernetes.io/projected/da649804-862c-45db-97ee-ad47fed7a72d-kube-api-access-fd75c\") pod \"swift-operator-controller-manager-5c6df8f9-bvc6f\" (UID: \"da649804-862c-45db-97ee-ad47fed7a72d\") " pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-bvc6f" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.814849 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h6wp\" (UniqueName: \"kubernetes.io/projected/d86f3cba-c9ef-47eb-b04e-8f10ac1b0734-kube-api-access-4h6wp\") pod \"octavia-operator-controller-manager-68c649d9d-k2fqd\" (UID: \"d86f3cba-c9ef-47eb-b04e-8f10ac1b0734\") " pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-k2fqd" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.815055 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcn8m\" (UniqueName: \"kubernetes.io/projected/7cd61a98-cc77-41b1-a06f-912207565b37-kube-api-access-pcn8m\") pod \"openstack-baremetal-operator-controller-manager-689f887b544qprx\" (UID: \"7cd61a98-cc77-41b1-a06f-912207565b37\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.815136 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjq7g\" (UniqueName: \"kubernetes.io/projected/08cf1d52-d8b7-477f-92c7-1dd2732ff9e3-kube-api-access-qjq7g\") pod \"nova-operator-controller-manager-5fbbf8b6cc-464zb\" (UID: \"08cf1d52-d8b7-477f-92c7-1dd2732ff9e3\") " pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-464zb" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.821569 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-h6nxs" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.841558 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-756ccf86c7-46n8m"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.846277 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h6wp\" (UniqueName: \"kubernetes.io/projected/d86f3cba-c9ef-47eb-b04e-8f10ac1b0734-kube-api-access-4h6wp\") pod \"octavia-operator-controller-manager-68c649d9d-k2fqd\" (UID: \"d86f3cba-c9ef-47eb-b04e-8f10ac1b0734\") " pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-k2fqd" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.849019 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-767f9d7567-z2xnf" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.856629 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjq7g\" (UniqueName: \"kubernetes.io/projected/08cf1d52-d8b7-477f-92c7-1dd2732ff9e3-kube-api-access-qjq7g\") pod \"nova-operator-controller-manager-5fbbf8b6cc-464zb\" (UID: \"08cf1d52-d8b7-477f-92c7-1dd2732ff9e3\") " pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-464zb" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.869608 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-55f78b7c4c-zgnj9"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.870489 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-zgnj9" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.872267 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-5g6p5" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.879951 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-55f78b7c4c-zgnj9"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.899236 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.900134 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.907065 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.907210 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-k2fqd" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.907212 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.907748 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-d7t4k" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.917790 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcn8m\" (UniqueName: \"kubernetes.io/projected/7cd61a98-cc77-41b1-a06f-912207565b37-kube-api-access-pcn8m\") pod \"openstack-baremetal-operator-controller-manager-689f887b544qprx\" (UID: \"7cd61a98-cc77-41b1-a06f-912207565b37\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.917854 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6684\" (UniqueName: \"kubernetes.io/projected/a02dee9b-ffed-4a5a-b833-cb236c105371-kube-api-access-f6684\") pod \"test-operator-controller-manager-756ccf86c7-46n8m\" (UID: \"a02dee9b-ffed-4a5a-b833-cb236c105371\") " pod="openstack-operators/test-operator-controller-manager-756ccf86c7-46n8m" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.917895 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp"] Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.917899 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcsn7\" (UniqueName: \"kubernetes.io/projected/9f650b3c-af01-4ce4-a702-daab8d5affc5-kube-api-access-fcsn7\") pod \"ovn-operator-controller-manager-bf6d4f946-mtbhx\" (UID: \"9f650b3c-af01-4ce4-a702-daab8d5affc5\") " pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-mtbhx" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.917987 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert\") pod \"openstack-baremetal-operator-controller-manager-689f887b544qprx\" (UID: \"7cd61a98-cc77-41b1-a06f-912207565b37\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.918040 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxgdf\" (UniqueName: \"kubernetes.io/projected/1adec510-a153-47b2-ae1d-5430d4ff5e31-kube-api-access-jxgdf\") pod \"telemetry-operator-controller-manager-97d456b9-fqsfn\" (UID: \"1adec510-a153-47b2-ae1d-5430d4ff5e31\") " pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-fqsfn" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.918070 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rphhh\" (UniqueName: \"kubernetes.io/projected/2e6fefac-bf85-4f28-a30d-808e83a13141-kube-api-access-rphhh\") pod \"placement-operator-controller-manager-8665b56d78-kfnbp\" (UID: \"2e6fefac-bf85-4f28-a30d-808e83a13141\") " pod="openstack-operators/placement-operator-controller-manager-8665b56d78-kfnbp" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.918092 4644 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fd75c\" (UniqueName: \"kubernetes.io/projected/da649804-862c-45db-97ee-ad47fed7a72d-kube-api-access-fd75c\") pod \"swift-operator-controller-manager-5c6df8f9-bvc6f\" (UID: \"da649804-862c-45db-97ee-ad47fed7a72d\") " pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-bvc6f" Dec 13 06:59:15 crc kubenswrapper[4644]: E1213 06:59:15.918145 4644 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 13 06:59:15 crc kubenswrapper[4644]: E1213 06:59:15.918252 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert podName:7cd61a98-cc77-41b1-a06f-912207565b37 nodeName:}" failed. No retries permitted until 2025-12-13 06:59:16.418223439 +0000 UTC m=+818.633174282 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert") pod "openstack-baremetal-operator-controller-manager-689f887b544qprx" (UID: "7cd61a98-cc77-41b1-a06f-912207565b37") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.939126 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcsn7\" (UniqueName: \"kubernetes.io/projected/9f650b3c-af01-4ce4-a702-daab8d5affc5-kube-api-access-fcsn7\") pod \"ovn-operator-controller-manager-bf6d4f946-mtbhx\" (UID: \"9f650b3c-af01-4ce4-a702-daab8d5affc5\") " pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-mtbhx" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.943023 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rphhh\" (UniqueName: \"kubernetes.io/projected/2e6fefac-bf85-4f28-a30d-808e83a13141-kube-api-access-rphhh\") pod \"placement-operator-controller-manager-8665b56d78-kfnbp\" (UID: \"2e6fefac-bf85-4f28-a30d-808e83a13141\") " pod="openstack-operators/placement-operator-controller-manager-8665b56d78-kfnbp" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.944224 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fd75c\" (UniqueName: \"kubernetes.io/projected/da649804-862c-45db-97ee-ad47fed7a72d-kube-api-access-fd75c\") pod \"swift-operator-controller-manager-5c6df8f9-bvc6f\" (UID: \"da649804-862c-45db-97ee-ad47fed7a72d\") " pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-bvc6f" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.949085 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcn8m\" (UniqueName: \"kubernetes.io/projected/7cd61a98-cc77-41b1-a06f-912207565b37-kube-api-access-pcn8m\") pod \"openstack-baremetal-operator-controller-manager-689f887b544qprx\" (UID: \"7cd61a98-cc77-41b1-a06f-912207565b37\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.985904 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-mtbhx" Dec 13 06:59:15 crc kubenswrapper[4644]: I1213 06:59:15.999525 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gfl6d"] Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.000481 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gfl6d" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.003680 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-kwzgc" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.007528 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gfl6d"] Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.007925 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-kfnbp" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.024952 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxgdf\" (UniqueName: \"kubernetes.io/projected/1adec510-a153-47b2-ae1d-5430d4ff5e31-kube-api-access-jxgdf\") pod \"telemetry-operator-controller-manager-97d456b9-fqsfn\" (UID: \"1adec510-a153-47b2-ae1d-5430d4ff5e31\") " pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-fqsfn" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.025300 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbd5c\" (UniqueName: \"kubernetes.io/projected/48353918-3568-4a9c-a5d2-709fb831ee75-kube-api-access-vbd5c\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.025405 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.025568 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stbm5\" (UniqueName: \"kubernetes.io/projected/3b75e9d6-b3a1-46ec-ae83-830583970e9c-kube-api-access-stbm5\") pod \"watcher-operator-controller-manager-55f78b7c4c-zgnj9\" (UID: \"3b75e9d6-b3a1-46ec-ae83-830583970e9c\") " pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-zgnj9" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.026093 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6684\" (UniqueName: \"kubernetes.io/projected/a02dee9b-ffed-4a5a-b833-cb236c105371-kube-api-access-f6684\") pod \"test-operator-controller-manager-756ccf86c7-46n8m\" (UID: \"a02dee9b-ffed-4a5a-b833-cb236c105371\") " pod="openstack-operators/test-operator-controller-manager-756ccf86c7-46n8m" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.027971 4644 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.026926 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-bvc6f" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.050899 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6684\" (UniqueName: \"kubernetes.io/projected/a02dee9b-ffed-4a5a-b833-cb236c105371-kube-api-access-f6684\") pod \"test-operator-controller-manager-756ccf86c7-46n8m\" (UID: \"a02dee9b-ffed-4a5a-b833-cb236c105371\") " pod="openstack-operators/test-operator-controller-manager-756ccf86c7-46n8m" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.053320 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxgdf\" (UniqueName: \"kubernetes.io/projected/1adec510-a153-47b2-ae1d-5430d4ff5e31-kube-api-access-jxgdf\") pod \"telemetry-operator-controller-manager-97d456b9-fqsfn\" (UID: \"1adec510-a153-47b2-ae1d-5430d4ff5e31\") " pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-fqsfn" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.074127 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-fqsfn" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.130656 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h72nz\" (UniqueName: \"kubernetes.io/projected/088c2258-52fc-4a04-b4c8-af259e9d2b75-kube-api-access-h72nz\") pod \"rabbitmq-cluster-operator-manager-668c99d594-gfl6d\" (UID: \"088c2258-52fc-4a04-b4c8-af259e9d2b75\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gfl6d" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.130905 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert\") pod \"infra-operator-controller-manager-58944d7758-4p77w\" (UID: \"b8c3ef08-66ae-474e-8204-2338afb7d08d\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.130968 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stbm5\" (UniqueName: \"kubernetes.io/projected/3b75e9d6-b3a1-46ec-ae83-830583970e9c-kube-api-access-stbm5\") pod \"watcher-operator-controller-manager-55f78b7c4c-zgnj9\" (UID: \"3b75e9d6-b3a1-46ec-ae83-830583970e9c\") " pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-zgnj9" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.131006 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" Dec 13 06:59:16 crc 
kubenswrapper[4644]: I1213 06:59:16.131066 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbd5c\" (UniqueName: \"kubernetes.io/projected/48353918-3568-4a9c-a5d2-709fb831ee75-kube-api-access-vbd5c\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.131089 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" Dec 13 06:59:16 crc kubenswrapper[4644]: E1213 06:59:16.131182 4644 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 13 06:59:16 crc kubenswrapper[4644]: E1213 06:59:16.131226 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs podName:48353918-3568-4a9c-a5d2-709fb831ee75 nodeName:}" failed. No retries permitted until 2025-12-13 06:59:16.631209742 +0000 UTC m=+818.846160576 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs") pod "openstack-operator-controller-manager-56f6fbdf6-q2xrp" (UID: "48353918-3568-4a9c-a5d2-709fb831ee75") : secret "metrics-server-cert" not found Dec 13 06:59:16 crc kubenswrapper[4644]: E1213 06:59:16.131334 4644 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 13 06:59:16 crc kubenswrapper[4644]: E1213 06:59:16.131358 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs podName:48353918-3568-4a9c-a5d2-709fb831ee75 nodeName:}" failed. No retries permitted until 2025-12-13 06:59:16.63135227 +0000 UTC m=+818.846303103 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs") pod "openstack-operator-controller-manager-56f6fbdf6-q2xrp" (UID: "48353918-3568-4a9c-a5d2-709fb831ee75") : secret "webhook-server-cert" not found Dec 13 06:59:16 crc kubenswrapper[4644]: E1213 06:59:16.131407 4644 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.131488 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-464zb" Dec 13 06:59:16 crc kubenswrapper[4644]: E1213 06:59:16.131492 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert podName:b8c3ef08-66ae-474e-8204-2338afb7d08d nodeName:}" failed. No retries permitted until 2025-12-13 06:59:17.131481514 +0000 UTC m=+819.346432347 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert") pod "infra-operator-controller-manager-58944d7758-4p77w" (UID: "b8c3ef08-66ae-474e-8204-2338afb7d08d") : secret "infra-operator-webhook-server-cert" not found Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.150437 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stbm5\" (UniqueName: \"kubernetes.io/projected/3b75e9d6-b3a1-46ec-ae83-830583970e9c-kube-api-access-stbm5\") pod \"watcher-operator-controller-manager-55f78b7c4c-zgnj9\" (UID: \"3b75e9d6-b3a1-46ec-ae83-830583970e9c\") " pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-zgnj9" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.150657 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbd5c\" (UniqueName: \"kubernetes.io/projected/48353918-3568-4a9c-a5d2-709fb831ee75-kube-api-access-vbd5c\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.232091 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h72nz\" (UniqueName: \"kubernetes.io/projected/088c2258-52fc-4a04-b4c8-af259e9d2b75-kube-api-access-h72nz\") pod \"rabbitmq-cluster-operator-manager-668c99d594-gfl6d\" (UID: \"088c2258-52fc-4a04-b4c8-af259e9d2b75\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gfl6d" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.248851 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h72nz\" (UniqueName: \"kubernetes.io/projected/088c2258-52fc-4a04-b4c8-af259e9d2b75-kube-api-access-h72nz\") pod \"rabbitmq-cluster-operator-manager-668c99d594-gfl6d\" (UID: \"088c2258-52fc-4a04-b4c8-af259e9d2b75\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gfl6d" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.263307 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-756ccf86c7-46n8m" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.281321 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-zgnj9" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.359827 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gfl6d" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.434790 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert\") pod \"openstack-baremetal-operator-controller-manager-689f887b544qprx\" (UID: \"7cd61a98-cc77-41b1-a06f-912207565b37\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx" Dec 13 06:59:16 crc kubenswrapper[4644]: E1213 06:59:16.434943 4644 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 13 06:59:16 crc kubenswrapper[4644]: E1213 06:59:16.435008 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert podName:7cd61a98-cc77-41b1-a06f-912207565b37 nodeName:}" failed. No retries permitted until 2025-12-13 06:59:17.434992519 +0000 UTC m=+819.649943352 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert") pod "openstack-baremetal-operator-controller-manager-689f887b544qprx" (UID: "7cd61a98-cc77-41b1-a06f-912207565b37") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.487603 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6ccf486b9-zmcbq"] Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.522155 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-95949466-9ffgl"] Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.527091 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5cf45c46bd-tndds"] Dec 13 06:59:16 crc kubenswrapper[4644]: W1213 06:59:16.541740 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod11c2a2ff_6f82_4b30_909b_f0f8c1e92394.slice/crio-0e442e651f043b124c780d2dafd06088eb938028165559eee9bd9607a77d8198 WatchSource:0}: Error finding container 0e442e651f043b124c780d2dafd06088eb938028165559eee9bd9607a77d8198: Status 404 returned error can't find the container with id 0e442e651f043b124c780d2dafd06088eb938028165559eee9bd9607a77d8198 Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.638119 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.638199 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" Dec 13 06:59:16 crc kubenswrapper[4644]: E1213 06:59:16.638351 4644 
secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 13 06:59:16 crc kubenswrapper[4644]: E1213 06:59:16.638418 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs podName:48353918-3568-4a9c-a5d2-709fb831ee75 nodeName:}" failed. No retries permitted until 2025-12-13 06:59:17.638398873 +0000 UTC m=+819.853349706 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs") pod "openstack-operator-controller-manager-56f6fbdf6-q2xrp" (UID: "48353918-3568-4a9c-a5d2-709fb831ee75") : secret "metrics-server-cert" not found Dec 13 06:59:16 crc kubenswrapper[4644]: E1213 06:59:16.639724 4644 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 13 06:59:16 crc kubenswrapper[4644]: E1213 06:59:16.639865 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs podName:48353918-3568-4a9c-a5d2-709fb831ee75 nodeName:}" failed. No retries permitted until 2025-12-13 06:59:17.639831857 +0000 UTC m=+819.854782689 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs") pod "openstack-operator-controller-manager-56f6fbdf6-q2xrp" (UID: "48353918-3568-4a9c-a5d2-709fb831ee75") : secret "webhook-server-cert" not found Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.687836 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-tndds" event={"ID":"11c2a2ff-6f82-4b30-909b-f0f8c1e92394","Type":"ContainerStarted","Data":"0e442e651f043b124c780d2dafd06088eb938028165559eee9bd9607a77d8198"} Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.689043 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-95949466-9ffgl" event={"ID":"3ec726b0-e1c1-497a-9364-f483cdf9b69b","Type":"ContainerStarted","Data":"57974a13fd28b9ac3deb90c538367a80edf0425869a0bd5444672e3444db24e9"} Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.690075 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-zmcbq" event={"ID":"aac5283b-a0c7-4cac-8a72-07ca5444b743","Type":"ContainerStarted","Data":"b28617fd0590915903564d57820b7a5ac81ced9a5f95475168ae696ba875c076"} Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.850292 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-767f9d7567-z2xnf"] Dec 13 06:59:16 crc kubenswrapper[4644]: W1213 06:59:16.872113 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfd6f17a4_40cc_4465_8c67_58c67230344d.slice/crio-c20d150b74bf2beec6b085979200941c7493b6aafa1ffda4f16c1e73ba7b86ce WatchSource:0}: Error finding container c20d150b74bf2beec6b085979200941c7493b6aafa1ffda4f16c1e73ba7b86ce: Status 404 returned error can't find the container with id c20d150b74bf2beec6b085979200941c7493b6aafa1ffda4f16c1e73ba7b86ce Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.891820 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/octavia-operator-controller-manager-68c649d9d-k2fqd"] Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.895966 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-59b8dcb766-9m7mp"] Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.902427 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-66f8b87655-h6nxs"] Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.932780 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5c6df8f9-bvc6f"] Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.944187 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-8665b56d78-kfnbp"] Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.952271 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-55f78b7c4c-zgnj9"] Dec 13 06:59:16 crc kubenswrapper[4644]: W1213 06:59:16.963301 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod08cf1d52_d8b7_477f_92c7_1dd2732ff9e3.slice/crio-562a4b5908d300784ca66c60c72237938056d4acf36e256294c1eb354a1f3490 WatchSource:0}: Error finding container 562a4b5908d300784ca66c60c72237938056d4acf36e256294c1eb354a1f3490: Status 404 returned error can't find the container with id 562a4b5908d300784ca66c60c72237938056d4acf36e256294c1eb354a1f3490 Dec 13 06:59:16 crc kubenswrapper[4644]: I1213 06:59:16.989800 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-f458558d7-fhckm"] Dec 13 06:59:16 crc kubenswrapper[4644]: W1213 06:59:16.999087 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod90ea237e_4f56_4008_a2df_d3c404424374.slice/crio-8bb0287f1ad1118b8b5a32dd38269c85dc14404338670fb666def91eddc24ed6 WatchSource:0}: Error finding container 8bb0287f1ad1118b8b5a32dd38269c85dc14404338670fb666def91eddc24ed6: Status 404 returned error can't find the container with id 8bb0287f1ad1118b8b5a32dd38269c85dc14404338670fb666def91eddc24ed6 Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.008706 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-97d456b9-fqsfn"] Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.008696 4644 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vh9gc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-7cd87b778f-vgdnd_openstack-operators(d143ef34-f1db-411d-941b-c229888e22b2): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.009280 4644 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pvdml,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-5c7cbf548f-jfdpn_openstack-operators(5e5582c5-50c3-4c4f-9693-16f2a71543ce): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.009397 4644 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fcsn7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-bf6d4f946-mtbhx_openstack-operators(9f650b3c-af01-4ce4-a702-daab8d5affc5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 13 06:59:17 crc 
kubenswrapper[4644]: E1213 06:59:17.009511 4644 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rphhh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-8665b56d78-kfnbp_openstack-operators(2e6fefac-bf85-4f28-a30d-808e83a13141): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.011107 4644 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:961417d59f527d925ac48ff6a11de747d0493315e496e34dc83d76a1a1fff58a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-stbm5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-55f78b7c4c-zgnj9_openstack-operators(3b75e9d6-b3a1-46ec-ae83-830583970e9c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.011183 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-kfnbp" podUID="2e6fefac-bf85-4f28-a30d-808e83a13141" Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.011239 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-vgdnd" podUID="d143ef34-f1db-411d-941b-c229888e22b2" Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.011260 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-jfdpn" podUID="5e5582c5-50c3-4c4f-9693-16f2a71543ce" Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.011278 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-mtbhx" podUID="9f650b3c-af01-4ce4-a702-daab8d5affc5" Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.012212 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-zgnj9" podUID="3b75e9d6-b3a1-46ec-ae83-830583970e9c" Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.014453 4644 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:44126f9c6b1d2bf752ddf989e20a4fc4cc1c07723d4fcb78465ccb2f55da6b3a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x8lms,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-5fdd9786f7-58rgg_openstack-operators(42b2a1fb-b5d1-46ff-932e-d831b53febf7): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.014532 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5fdd9786f7-58rgg"] Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.015760 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-58rgg" podUID="42b2a1fb-b5d1-46ff-932e-d831b53febf7" Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.018867 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-f76f4954c-cmcbp"] Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.020239 4644 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fd75c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5c6df8f9-bvc6f_openstack-operators(da649804-862c-45db-97ee-ad47fed7a72d): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.021395 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-bvc6f" podUID="da649804-862c-45db-97ee-ad47fed7a72d"
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.023065 4644 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h72nz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-gfl6d_openstack-operators(088c2258-52fc-4a04-b4c8-af259e9d2b75): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.023357 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gfl6d"]
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.024140 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gfl6d" podUID="088c2258-52fc-4a04-b4c8-af259e9d2b75"
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.028496 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-464zb"]
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.033745 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7cd87b778f-vgdnd"]
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.038631 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-bf6d4f946-mtbhx"]
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.041717 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-756ccf86c7-46n8m"]
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.045291 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5c7cbf548f-jfdpn"]
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.147771 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert\") pod \"infra-operator-controller-manager-58944d7758-4p77w\" (UID: \"b8c3ef08-66ae-474e-8204-2338afb7d08d\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w"
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.148146 4644 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.148218 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert podName:b8c3ef08-66ae-474e-8204-2338afb7d08d nodeName:}" failed. No retries permitted until 2025-12-13 06:59:19.148201787 +0000 UTC m=+821.363152620 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert") pod "infra-operator-controller-manager-58944d7758-4p77w" (UID: "b8c3ef08-66ae-474e-8204-2338afb7d08d") : secret "infra-operator-webhook-server-cert" not found
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.453391 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert\") pod \"openstack-baremetal-operator-controller-manager-689f887b544qprx\" (UID: \"7cd61a98-cc77-41b1-a06f-912207565b37\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx"
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.453586 4644 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.453627 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert podName:7cd61a98-cc77-41b1-a06f-912207565b37 nodeName:}" failed. No retries permitted until 2025-12-13 06:59:19.45361589 +0000 UTC m=+821.668566723 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert") pod "openstack-baremetal-operator-controller-manager-689f887b544qprx" (UID: "7cd61a98-cc77-41b1-a06f-912207565b37") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.656198 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp"
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.656296 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp"
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.656410 4644 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.656470 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs podName:48353918-3568-4a9c-a5d2-709fb831ee75 nodeName:}" failed. No retries permitted until 2025-12-13 06:59:19.656455339 +0000 UTC m=+821.871406171 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs") pod "openstack-operator-controller-manager-56f6fbdf6-q2xrp" (UID: "48353918-3568-4a9c-a5d2-709fb831ee75") : secret "webhook-server-cert" not found
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.656784 4644 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.656818 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs podName:48353918-3568-4a9c-a5d2-709fb831ee75 nodeName:}" failed. No retries permitted until 2025-12-13 06:59:19.656810275 +0000 UTC m=+821.871761108 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs") pod "openstack-operator-controller-manager-56f6fbdf6-q2xrp" (UID: "48353918-3568-4a9c-a5d2-709fb831ee75") : secret "metrics-server-cert" not found
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.700694 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-58rgg" event={"ID":"42b2a1fb-b5d1-46ff-932e-d831b53febf7","Type":"ContainerStarted","Data":"d5f7e36ad4a919ebf02ddb24bc421ece59feb0df24a3114e842d380861fc50ce"}
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.703718 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:44126f9c6b1d2bf752ddf989e20a4fc4cc1c07723d4fcb78465ccb2f55da6b3a\\\"\"" pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-58rgg" podUID="42b2a1fb-b5d1-46ff-932e-d831b53febf7"
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.704809 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-mtbhx" event={"ID":"9f650b3c-af01-4ce4-a702-daab8d5affc5","Type":"ContainerStarted","Data":"b855118f6eb832e339e7d4d23fb36e7221b56803f06392e4cff9ac848bc4c79a"}
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.707350 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-mtbhx" podUID="9f650b3c-af01-4ce4-a702-daab8d5affc5"
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.713207 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-k2fqd" event={"ID":"d86f3cba-c9ef-47eb-b04e-8f10ac1b0734","Type":"ContainerStarted","Data":"1b391dfc672ff2bb801d399a271ec8aebdc3dea535c4dedc713f40eb7d5cc157"}
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.727100 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-756ccf86c7-46n8m" event={"ID":"a02dee9b-ffed-4a5a-b833-cb236c105371","Type":"ContainerStarted","Data":"9b366f0bcace66b91419109727e3378cf9ff7a52b683e01162d4431bb3bbc4e2"}
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.730798 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-kfnbp" event={"ID":"2e6fefac-bf85-4f28-a30d-808e83a13141","Type":"ContainerStarted","Data":"762d76348c9e872560366e67e9325e8c5524382518d142622f2b130907f95ac0"}
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.733068 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-bvc6f" event={"ID":"da649804-862c-45db-97ee-ad47fed7a72d","Type":"ContainerStarted","Data":"5f5547c5c4dc951615eb6a99d404dff7a6ee3637bf552b28060d440fd3c23312"}
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.735005 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991\\\"\"" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-bvc6f" podUID="da649804-862c-45db-97ee-ad47fed7a72d"
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.734987 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-464zb" event={"ID":"08cf1d52-d8b7-477f-92c7-1dd2732ff9e3","Type":"ContainerStarted","Data":"562a4b5908d300784ca66c60c72237938056d4acf36e256294c1eb354a1f3490"}
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.745530 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\"" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-kfnbp" podUID="2e6fefac-bf85-4f28-a30d-808e83a13141"
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.748489 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gfl6d" event={"ID":"088c2258-52fc-4a04-b4c8-af259e9d2b75","Type":"ContainerStarted","Data":"3a12e1432be4b30cc2fc5caf20402cb060be64963c2d9609192b2e288db1cd8a"}
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.750937 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-fqsfn" event={"ID":"1adec510-a153-47b2-ae1d-5430d4ff5e31","Type":"ContainerStarted","Data":"07a7eee98c808a60e97582df67bb2d4a45b287a87ad8bd07754c976dbebb8ca4"}
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.750882 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gfl6d" podUID="088c2258-52fc-4a04-b4c8-af259e9d2b75"
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.755227 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-zgnj9" event={"ID":"3b75e9d6-b3a1-46ec-ae83-830583970e9c","Type":"ContainerStarted","Data":"eca85d20702bd2547528281f11231bc82dbe6bc44df865f0ba84732e476bd943"}
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.761731 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:961417d59f527d925ac48ff6a11de747d0493315e496e34dc83d76a1a1fff58a\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-zgnj9" podUID="3b75e9d6-b3a1-46ec-ae83-830583970e9c"
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.763059 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-jfdpn" event={"ID":"5e5582c5-50c3-4c4f-9693-16f2a71543ce","Type":"ContainerStarted","Data":"13f2e0941bb71797b3ce0bacdc26f316fd777663098edf04ba23f4f786f76e45"}
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.767309 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-jfdpn" podUID="5e5582c5-50c3-4c4f-9693-16f2a71543ce"
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.779140 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-767f9d7567-z2xnf" event={"ID":"fd6f17a4-40cc-4465-8c67-58c67230344d","Type":"ContainerStarted","Data":"c20d150b74bf2beec6b085979200941c7493b6aafa1ffda4f16c1e73ba7b86ce"}
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.784338 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-h6nxs" event={"ID":"32a38d48-fe84-4ede-860c-ae76de27cbe6","Type":"ContainerStarted","Data":"1936bfa339dfd3c6bf5cfc6362fde2104ca64c8fc13c200c3dec6457c2c913cd"}
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.795471 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-f458558d7-fhckm" event={"ID":"ac41b645-ea22-42ac-846e-fa16d0beaee4","Type":"ContainerStarted","Data":"e51b5e11cf867597e5c00707f22bbfea1890e4c11ca1f52c15e091ce3ff615b8"}
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.797616 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-9m7mp" event={"ID":"98e02ffd-3d31-4b00-8bc7-5f225cdf9fc5","Type":"ContainerStarted","Data":"a74968e4383baf08016878d51f9045105f0b3416d9765cdf4a66cd2fd855e74e"}
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.799123 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-vgdnd" event={"ID":"d143ef34-f1db-411d-941b-c229888e22b2","Type":"ContainerStarted","Data":"db0ddb7b6755e1236a4b6bb4655ff1007c1a04e209dade2f1032a2daeecb40d9"}
Dec 13 06:59:17 crc kubenswrapper[4644]: E1213 06:59:17.800868 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-vgdnd" podUID="d143ef34-f1db-411d-941b-c229888e22b2"
Dec 13 06:59:17 crc kubenswrapper[4644]: I1213 06:59:17.801616 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-cmcbp" event={"ID":"90ea237e-4f56-4008-a2df-d3c404424374","Type":"ContainerStarted","Data":"8bb0287f1ad1118b8b5a32dd38269c85dc14404338670fb666def91eddc24ed6"}
Dec 13 06:59:18 crc kubenswrapper[4644]: E1213 06:59:18.816293 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-jfdpn" podUID="5e5582c5-50c3-4c4f-9693-16f2a71543ce"
Dec 13 06:59:18 crc kubenswrapper[4644]: E1213 06:59:18.816334 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:44126f9c6b1d2bf752ddf989e20a4fc4cc1c07723d4fcb78465ccb2f55da6b3a\\\"\"" pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-58rgg" podUID="42b2a1fb-b5d1-46ff-932e-d831b53febf7"
Dec 13 06:59:18 crc kubenswrapper[4644]: E1213 06:59:18.816349 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gfl6d" podUID="088c2258-52fc-4a04-b4c8-af259e9d2b75"
Dec 13 06:59:18 crc kubenswrapper[4644]: E1213 06:59:18.816392 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-vgdnd" podUID="d143ef34-f1db-411d-941b-c229888e22b2"
Dec 13 06:59:18 crc kubenswrapper[4644]: E1213 06:59:18.816396 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:961417d59f527d925ac48ff6a11de747d0493315e496e34dc83d76a1a1fff58a\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-zgnj9" podUID="3b75e9d6-b3a1-46ec-ae83-830583970e9c"
Dec 13 06:59:18 crc kubenswrapper[4644]: E1213 06:59:18.816435 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991\\\"\"" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-bvc6f" podUID="da649804-862c-45db-97ee-ad47fed7a72d"
Dec 13 06:59:18 crc kubenswrapper[4644]: E1213 06:59:18.816471 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-mtbhx" podUID="9f650b3c-af01-4ce4-a702-daab8d5affc5"
Dec 13 06:59:18 crc kubenswrapper[4644]: E1213 06:59:18.816544 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\"" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-kfnbp" podUID="2e6fefac-bf85-4f28-a30d-808e83a13141"
Dec 13 06:59:19 crc kubenswrapper[4644]: I1213 06:59:19.185120 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert\") pod \"infra-operator-controller-manager-58944d7758-4p77w\" (UID: \"b8c3ef08-66ae-474e-8204-2338afb7d08d\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w"
Dec 13 06:59:19 crc kubenswrapper[4644]: E1213 06:59:19.185321 4644 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 13 06:59:19 crc kubenswrapper[4644]: E1213 06:59:19.185369 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert podName:b8c3ef08-66ae-474e-8204-2338afb7d08d nodeName:}" failed. No retries permitted until 2025-12-13 06:59:23.185355844 +0000 UTC m=+825.400306678 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert") pod "infra-operator-controller-manager-58944d7758-4p77w" (UID: "b8c3ef08-66ae-474e-8204-2338afb7d08d") : secret "infra-operator-webhook-server-cert" not found
Dec 13 06:59:19 crc kubenswrapper[4644]: I1213 06:59:19.494610 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert\") pod \"openstack-baremetal-operator-controller-manager-689f887b544qprx\" (UID: \"7cd61a98-cc77-41b1-a06f-912207565b37\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx"
Dec 13 06:59:19 crc kubenswrapper[4644]: E1213 06:59:19.495072 4644 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 13 06:59:19 crc kubenswrapper[4644]: E1213 06:59:19.495124 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert podName:7cd61a98-cc77-41b1-a06f-912207565b37 nodeName:}" failed. No retries permitted until 2025-12-13 06:59:23.495110347 +0000 UTC m=+825.710061180 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert") pod "openstack-baremetal-operator-controller-manager-689f887b544qprx" (UID: "7cd61a98-cc77-41b1-a06f-912207565b37") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 13 06:59:19 crc kubenswrapper[4644]: I1213 06:59:19.696718 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp"
Dec 13 06:59:19 crc kubenswrapper[4644]: I1213 06:59:19.696831 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp"
Dec 13 06:59:19 crc kubenswrapper[4644]: E1213 06:59:19.696992 4644 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 13 06:59:19 crc kubenswrapper[4644]: E1213 06:59:19.697044 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs podName:48353918-3568-4a9c-a5d2-709fb831ee75 nodeName:}" failed. No retries permitted until 2025-12-13 06:59:23.697028735 +0000 UTC m=+825.911979568 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs") pod "openstack-operator-controller-manager-56f6fbdf6-q2xrp" (UID: "48353918-3568-4a9c-a5d2-709fb831ee75") : secret "metrics-server-cert" not found
Dec 13 06:59:19 crc kubenswrapper[4644]: E1213 06:59:19.697094 4644 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 13 06:59:19 crc kubenswrapper[4644]: E1213 06:59:19.697117 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs podName:48353918-3568-4a9c-a5d2-709fb831ee75 nodeName:}" failed. No retries permitted until 2025-12-13 06:59:23.697110308 +0000 UTC m=+825.912061140 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs") pod "openstack-operator-controller-manager-56f6fbdf6-q2xrp" (UID: "48353918-3568-4a9c-a5d2-709fb831ee75") : secret "webhook-server-cert" not found
Dec 13 06:59:23 crc kubenswrapper[4644]: I1213 06:59:23.251710 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert\") pod \"infra-operator-controller-manager-58944d7758-4p77w\" (UID: \"b8c3ef08-66ae-474e-8204-2338afb7d08d\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w"
Dec 13 06:59:23 crc kubenswrapper[4644]: E1213 06:59:23.251882 4644 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 13 06:59:23 crc kubenswrapper[4644]: E1213 06:59:23.252161 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert podName:b8c3ef08-66ae-474e-8204-2338afb7d08d nodeName:}" failed. No retries permitted until 2025-12-13 06:59:31.252138092 +0000 UTC m=+833.467088926 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert") pod "infra-operator-controller-manager-58944d7758-4p77w" (UID: "b8c3ef08-66ae-474e-8204-2338afb7d08d") : secret "infra-operator-webhook-server-cert" not found
Dec 13 06:59:23 crc kubenswrapper[4644]: I1213 06:59:23.560138 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert\") pod \"openstack-baremetal-operator-controller-manager-689f887b544qprx\" (UID: \"7cd61a98-cc77-41b1-a06f-912207565b37\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx"
Dec 13 06:59:23 crc kubenswrapper[4644]: E1213 06:59:23.560318 4644 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 13 06:59:23 crc kubenswrapper[4644]: E1213 06:59:23.560392 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert podName:7cd61a98-cc77-41b1-a06f-912207565b37 nodeName:}" failed. No retries permitted until 2025-12-13 06:59:31.5603738 +0000 UTC m=+833.775324633 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert") pod "openstack-baremetal-operator-controller-manager-689f887b544qprx" (UID: "7cd61a98-cc77-41b1-a06f-912207565b37") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 13 06:59:23 crc kubenswrapper[4644]: I1213 06:59:23.762847 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp"
Dec 13 06:59:23 crc kubenswrapper[4644]: E1213 06:59:23.762999 4644 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 13 06:59:23 crc kubenswrapper[4644]: E1213 06:59:23.763045 4644 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 13 06:59:23 crc kubenswrapper[4644]: I1213 06:59:23.763001 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp"
Dec 13 06:59:23 crc kubenswrapper[4644]: E1213 06:59:23.763049 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs podName:48353918-3568-4a9c-a5d2-709fb831ee75 nodeName:}" failed. No retries permitted until 2025-12-13 06:59:31.763035513 +0000 UTC m=+833.977986347 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs") pod "openstack-operator-controller-manager-56f6fbdf6-q2xrp" (UID: "48353918-3568-4a9c-a5d2-709fb831ee75") : secret "metrics-server-cert" not found
Dec 13 06:59:23 crc kubenswrapper[4644]: E1213 06:59:23.763106 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs podName:48353918-3568-4a9c-a5d2-709fb831ee75 nodeName:}" failed. No retries permitted until 2025-12-13 06:59:31.763086449 +0000 UTC m=+833.978037282 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs") pod "openstack-operator-controller-manager-56f6fbdf6-q2xrp" (UID: "48353918-3568-4a9c-a5d2-709fb831ee75") : secret "webhook-server-cert" not found
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.855722 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-zmcbq" event={"ID":"aac5283b-a0c7-4cac-8a72-07ca5444b743","Type":"ContainerStarted","Data":"97a1943204e1c5f03f9a19813f41a3d9a29727af429a6837fb66f4c8873ea709"}
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.856267 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-zmcbq"
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.860027 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-767f9d7567-z2xnf" event={"ID":"fd6f17a4-40cc-4465-8c67-58c67230344d","Type":"ContainerStarted","Data":"9cabb9a74f07169d5610aab0a775da19b940de4c40bd6f6e5e89a2d444aad29b"}
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.860428 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-767f9d7567-z2xnf"
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.861937 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-tndds" event={"ID":"11c2a2ff-6f82-4b30-909b-f0f8c1e92394","Type":"ContainerStarted","Data":"b2b3d816802f29a546519b2ba700b3cb1590f9bc976c8e080c097e93a81586c5"}
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.862297 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-tndds"
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.863340 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-f458558d7-fhckm" event={"ID":"ac41b645-ea22-42ac-846e-fa16d0beaee4","Type":"ContainerStarted","Data":"c7cef122f44d965fc0a7cf57b8795f7344fefca1d605a4fa78a71fe2459f1b16"}
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.863707 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-f458558d7-fhckm"
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.864636 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-k2fqd" event={"ID":"d86f3cba-c9ef-47eb-b04e-8f10ac1b0734","Type":"ContainerStarted","Data":"57fbb8d3f0b79ffb3cd4d658695c011e30350bd7f0a4134e19034a9744e44eb6"}
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.864990 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-k2fqd"
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.865906 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-464zb" event={"ID":"08cf1d52-d8b7-477f-92c7-1dd2732ff9e3","Type":"ContainerStarted","Data":"4b72ae11abb04a812db0a0ea9b6a848534e11bb10954fb94d1141426a60637c3"}
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.866230 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-464zb"
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.867117 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-cmcbp" event={"ID":"90ea237e-4f56-4008-a2df-d3c404424374","Type":"ContainerStarted","Data":"12bac91531d1f2723cb2b14b04eac7fcc6d447c3036a962ad0bdc78b887f47bf"}
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.867470 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-cmcbp"
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.868401 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-95949466-9ffgl" event={"ID":"3ec726b0-e1c1-497a-9364-f483cdf9b69b","Type":"ContainerStarted","Data":"a6210a86c2a414b7dbadbb66356b261ff3092b2a968514cf99a9c10b158df2a6"}
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.868757 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-95949466-9ffgl"
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.869725 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-756ccf86c7-46n8m" event={"ID":"a02dee9b-ffed-4a5a-b833-cb236c105371","Type":"ContainerStarted","Data":"455b941779b62ce63ff7a2b6ecbe7872ba833a430246e21299bc983a54c04188"}
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.870052 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-756ccf86c7-46n8m"
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.875234 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-h6nxs" event={"ID":"32a38d48-fe84-4ede-860c-ae76de27cbe6","Type":"ContainerStarted","Data":"3ca185c955eeb95c8f8aeaef9ea9e2bc7ca5baddede81970a89b8aedc00b575b"}
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.875618 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-h6nxs"
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.876620 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-fqsfn" event={"ID":"1adec510-a153-47b2-ae1d-5430d4ff5e31","Type":"ContainerStarted","Data":"6907f0fa860435b3c8725cc3cd499797b690e4e352683c8f14b9d81bc3e5b15a"}
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.876962 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-fqsfn"
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.877938 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-9m7mp" event={"ID":"98e02ffd-3d31-4b00-8bc7-5f225cdf9fc5","Type":"ContainerStarted","Data":"f7f082614d1e2b707d1c2a8c40a5888e593ea21c92a68b469fa3f1eff1a75d6a"}
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.878254 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-9m7mp"
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.894336 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-zmcbq" podStartSLOduration=2.07725507 podStartE2EDuration="10.894324451s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:16.513481965 +0000 UTC m=+818.728432798" lastFinishedPulling="2025-12-13 06:59:25.330551346 +0000 UTC m=+827.545502179" observedRunningTime="2025-12-13 06:59:25.887959575 +0000 UTC m=+828.102910408" watchObservedRunningTime="2025-12-13 06:59:25.894324451 +0000 UTC m=+828.109275284"
Dec 13 06:59:25 crc kubenswrapper[4644]: I1213 06:59:25.925761 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-767f9d7567-z2xnf" podStartSLOduration=2.4919733 podStartE2EDuration="10.925744779s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:16.874808792 +0000 UTC m=+819.089759625" lastFinishedPulling="2025-12-13 06:59:25.308580271 +0000 UTC m=+827.523531104" observedRunningTime="2025-12-13 06:59:25.92250055 +0000 UTC m=+828.137451384" watchObservedRunningTime="2025-12-13 06:59:25.925744779 +0000 UTC m=+828.140695612"
Dec 13 06:59:26 crc kubenswrapper[4644]: I1213 06:59:26.034201 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-f458558d7-fhckm" podStartSLOduration=2.6844923769999998 podStartE2EDuration="11.034185362s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:16.960939804 +0000 UTC m=+819.175890637" lastFinishedPulling="2025-12-13 06:59:25.310632789 +0000 UTC m=+827.525583622" observedRunningTime="2025-12-13 06:59:26.030735406 +0000 UTC m=+828.245686239" watchObservedRunningTime="2025-12-13 06:59:26.034185362 +0000 UTC m=+828.249136195"
Dec 13 06:59:26 crc kubenswrapper[4644]: I1213 06:59:26.238757 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-756ccf86c7-46n8m" podStartSLOduration=2.918572255 podStartE2EDuration="11.238735096s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:16.989201464 +0000 UTC m=+819.204152297" lastFinishedPulling="2025-12-13 06:59:25.309364306 +0000 UTC m=+827.524315138" observedRunningTime="2025-12-13 06:59:26.155238736 +0000 UTC m=+828.370189569" watchObservedRunningTime="2025-12-13 06:59:26.238735096 +0000 UTC m=+828.453685929"
Dec 13 06:59:26 crc kubenswrapper[4644]: I1213 06:59:26.239096 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-k2fqd" podStartSLOduration=2.864501364 podStartE2EDuration="11.239092067s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:16.917487104 +0000 UTC m=+819.132437937" lastFinishedPulling="2025-12-13 06:59:25.292077808 +0000 UTC m=+827.507028640" observedRunningTime="2025-12-13 06:59:26.235590324 +0000 UTC m=+828.450541157" watchObservedRunningTime="2025-12-13 06:59:26.239092067 +0000 UTC m=+828.454042899"
Dec 13 06:59:26 crc kubenswrapper[4644]: I1213 06:59:26.291873 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-fqsfn" podStartSLOduration=2.955381806 podStartE2EDuration="11.291854187s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:16.95970357 +0000 UTC m=+819.174654403" lastFinishedPulling="2025-12-13 06:59:25.296175952 +0000 UTC m=+827.511126784" observedRunningTime="2025-12-13 06:59:26.289273796 +0000 UTC m=+828.504224629" watchObservedRunningTime="2025-12-13 06:59:26.291854187 +0000 UTC m=+828.506805020"
Dec 13 06:59:26 crc kubenswrapper[4644]: I1213 06:59:26.356596 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-9m7mp" podStartSLOduration=2.9849357039999997 podStartE2EDuration="11.35658107s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:16.937704903 +0000 UTC m=+819.152655736" lastFinishedPulling="2025-12-13 06:59:25.309350269 +0000 UTC m=+827.524301102" observedRunningTime="2025-12-13 06:59:26.353319999 +0000 UTC m=+828.568270832" watchObservedRunningTime="2025-12-13 06:59:26.35658107 +0000 UTC m=+828.571531904"
Dec 13 06:59:26 crc kubenswrapper[4644]: I1213 06:59:26.422866 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-h6nxs" podStartSLOduration=3.060202593 podStartE2EDuration="11.422847989s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:16.940566613 +0000 UTC m=+819.155517446" lastFinishedPulling="2025-12-13 06:59:25.303212008 +0000 UTC m=+827.518162842" observedRunningTime="2025-12-13 06:59:26.420638405 +0000 UTC m=+828.635589238" watchObservedRunningTime="2025-12-13 06:59:26.422847989 +0000 UTC m=+828.637798822"
Dec 13 06:59:26 crc kubenswrapper[4644]: I1213 06:59:26.475350 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-tndds" podStartSLOduration=2.753784291 podStartE2EDuration="11.475325393s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:16.546590276 +0000 UTC m=+818.761541110" lastFinishedPulling="2025-12-13 06:59:25.268131378 +0000 UTC m=+827.483082212" observedRunningTime="2025-12-13 06:59:26.472336595 +0000 UTC m=+828.687287428" watchObservedRunningTime="2025-12-13 06:59:26.475325393 +0000 UTC m=+828.690276227"
Dec 13 06:59:26 crc kubenswrapper[4644]: I1213 06:59:26.492029 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-95949466-9ffgl" podStartSLOduration=2.706280216 podStartE2EDuration="11.492013375s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:16.536831762 +0000 UTC m=+818.751782595" lastFinishedPulling="2025-12-13 06:59:25.322564921 +0000 UTC m=+827.537515754" observedRunningTime="2025-12-13 06:59:26.486335923 +0000 UTC m=+828.701286756" watchObservedRunningTime="2025-12-13 06:59:26.492013375 +0000 UTC m=+828.706964199"
Dec 13 06:59:26 crc kubenswrapper[4644]: I1213 06:59:26.510334 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-464zb" podStartSLOduration=3.15005971 podStartE2EDuration="11.510315654s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:16.982126103 +0000 UTC m=+819.197076936" lastFinishedPulling="2025-12-13 06:59:25.342382057 +0000 UTC m=+827.557332880" observedRunningTime="2025-12-13 06:59:26.508363032 +0000 UTC m=+828.723313865" watchObservedRunningTime="2025-12-13 06:59:26.510315654 +0000 UTC m=+828.725266487"
Dec 13 06:59:26 crc kubenswrapper[4644]: I1213 06:59:26.533097 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-cmcbp" podStartSLOduration=3.245218324 podStartE2EDuration="11.533079137s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:17.00659337 +0000 UTC m=+819.221544204" lastFinishedPulling="2025-12-13 06:59:25.294454184 +0000 UTC m=+827.509405017" observedRunningTime="2025-12-13 06:59:26.52875063 +0000 UTC m=+828.743701464" watchObservedRunningTime="2025-12-13 06:59:26.533079137 +0000 UTC m=+828.748029971"
Dec 13 06:59:31 crc kubenswrapper[4644]: I1213 06:59:31.264819 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert\") pod \"infra-operator-controller-manager-58944d7758-4p77w\" (UID: \"b8c3ef08-66ae-474e-8204-2338afb7d08d\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w"
Dec 13 06:59:31 crc kubenswrapper[4644]: I1213 06:59:31.269008 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8c3ef08-66ae-474e-8204-2338afb7d08d-cert\") pod \"infra-operator-controller-manager-58944d7758-4p77w\" (UID: \"b8c3ef08-66ae-474e-8204-2338afb7d08d\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w"
Dec 13 06:59:31 crc kubenswrapper[4644]: I1213 06:59:31.538867 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w"
Dec 13 06:59:31 crc kubenswrapper[4644]: I1213 06:59:31.568713 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert\") pod \"openstack-baremetal-operator-controller-manager-689f887b544qprx\" (UID: \"7cd61a98-cc77-41b1-a06f-912207565b37\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx"
Dec 13 06:59:31 crc kubenswrapper[4644]: I1213 06:59:31.571501 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7cd61a98-cc77-41b1-a06f-912207565b37-cert\") pod \"openstack-baremetal-operator-controller-manager-689f887b544qprx\" (UID: \"7cd61a98-cc77-41b1-a06f-912207565b37\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx"
Dec 13 06:59:31 crc kubenswrapper[4644]: I1213 06:59:31.771339 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp"
Dec 13 06:59:31 crc kubenswrapper[4644]: I1213 06:59:31.771436 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp"
Dec 13 06:59:31 crc kubenswrapper[4644]: E1213 06:59:31.771545 4644 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 13 06:59:31 crc kubenswrapper[4644]: E1213 06:59:31.771598 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs podName:48353918-3568-4a9c-a5d2-709fb831ee75 nodeName:}" failed. No retries permitted until 2025-12-13 06:59:47.771584105 +0000 UTC m=+849.986534938 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs") pod "openstack-operator-controller-manager-56f6fbdf6-q2xrp" (UID: "48353918-3568-4a9c-a5d2-709fb831ee75") : secret "webhook-server-cert" not found
Dec 13 06:59:31 crc kubenswrapper[4644]: I1213 06:59:31.774143 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-metrics-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp"
Dec 13 06:59:31 crc kubenswrapper[4644]: I1213 06:59:31.837162 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx"
Dec 13 06:59:32 crc kubenswrapper[4644]: I1213 06:59:32.360806 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-58944d7758-4p77w"]
Dec 13 06:59:32 crc kubenswrapper[4644]: W1213 06:59:32.368770 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb8c3ef08_66ae_474e_8204_2338afb7d08d.slice/crio-0353f7f6b96085586e28b9934008fa7c26c0a34ccd3c7ebd726f09cf2c17c89d WatchSource:0}: Error finding container 0353f7f6b96085586e28b9934008fa7c26c0a34ccd3c7ebd726f09cf2c17c89d: Status 404 returned error can't find the container with id 0353f7f6b96085586e28b9934008fa7c26c0a34ccd3c7ebd726f09cf2c17c89d
Dec 13 06:59:32 crc kubenswrapper[4644]: I1213 06:59:32.405178 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx"]
Dec 13 06:59:32 crc kubenswrapper[4644]: I1213 06:59:32.915453 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w" event={"ID":"b8c3ef08-66ae-474e-8204-2338afb7d08d","Type":"ContainerStarted","Data":"0353f7f6b96085586e28b9934008fa7c26c0a34ccd3c7ebd726f09cf2c17c89d"}
Dec 13 06:59:32 crc kubenswrapper[4644]: I1213 06:59:32.917686 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx" event={"ID":"7cd61a98-cc77-41b1-a06f-912207565b37","Type":"ContainerStarted","Data":"406c005f8b3c362c314b9ce502c951ecfdde43f5aa93dd79a2ff1f6dc046dc32"}
Dec 13 06:59:35 crc kubenswrapper[4644]: I1213 06:59:35.502965 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-95949466-9ffgl"
Dec 13 06:59:35 crc kubenswrapper[4644]: I1213 06:59:35.524051 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-tndds"
Dec 13 06:59:35 crc kubenswrapper[4644]: I1213 06:59:35.581770 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-zmcbq"
Dec 13 06:59:35 crc kubenswrapper[4644]: I1213 06:59:35.596755 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-9m7mp"
Dec 13 06:59:35 crc kubenswrapper[4644]: I1213 06:59:35.652519 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-f458558d7-fhckm"
Dec 13 06:59:35 crc kubenswrapper[4644]: I1213 06:59:35.786487 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-cmcbp"
Dec 13 06:59:35 crc kubenswrapper[4644]: I1213 06:59:35.823593 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-h6nxs"
Dec 13 06:59:35 crc kubenswrapper[4644]: I1213 06:59:35.856790 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-767f9d7567-z2xnf"
Dec 13 06:59:35 crc kubenswrapper[4644]: I1213 06:59:35.911031 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-k2fqd"
Dec 13 06:59:36 crc kubenswrapper[4644]: I1213 06:59:36.077419 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-fqsfn"
Dec 13 06:59:36 crc kubenswrapper[4644]: I1213 06:59:36.133733 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-464zb"
Dec 13 06:59:36 crc kubenswrapper[4644]: I1213 06:59:36.272073 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-756ccf86c7-46n8m"
Dec 13 06:59:42 crc kubenswrapper[4644]: I1213 06:59:42.989098 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-bvc6f" event={"ID":"da649804-862c-45db-97ee-ad47fed7a72d","Type":"ContainerStarted","Data":"1d6588731f12410f1cbc0f53fd486540491ba5123707114bf9953a20e0a7565a"}
Dec 13 06:59:42 crc kubenswrapper[4644]: I1213 06:59:42.991962 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-bvc6f"
Dec 13 06:59:42 crc kubenswrapper[4644]: I1213 06:59:42.993588 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w" event={"ID":"b8c3ef08-66ae-474e-8204-2338afb7d08d","Type":"ContainerStarted","Data":"39871fdc9a2126b6eda789a0191035aebf2823f05a593e4420dcf6ee57a9d4df"}
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.008886 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gfl6d" event={"ID":"088c2258-52fc-4a04-b4c8-af259e9d2b75","Type":"ContainerStarted","Data":"cbdd9ca1dcb8ebf189ddc12e073134809b443a8a42a0e277186a88f7aaa52b77"}
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.017301 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-bvc6f" podStartSLOduration=2.891030969 podStartE2EDuration="28.017282647s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:17.020144165 +0000 UTC m=+819.235094998" lastFinishedPulling="2025-12-13 06:59:42.146395844 +0000 UTC m=+844.361346676" observedRunningTime="2025-12-13 06:59:43.015501669 +0000 UTC m=+845.230452502" watchObservedRunningTime="2025-12-13 06:59:43.017282647 +0000 UTC m=+845.232233480"
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.023734 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-58rgg" event={"ID":"42b2a1fb-b5d1-46ff-932e-d831b53febf7","Type":"ContainerStarted","Data":"a398e0b29af30c43915f0ade66cb2c4cfe77c32b07ed6cae47430d8f631cccb6"}
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.024838 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-58rgg"
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.025541 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx" event={"ID":"7cd61a98-cc77-41b1-a06f-912207565b37","Type":"ContainerStarted","Data":"8f30e0f35c49c60b2cacdf99fee483e8bc681a2bf469ce07dd98e6453468bc9a"}
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.026176 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx"
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.027066 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-mtbhx" event={"ID":"9f650b3c-af01-4ce4-a702-daab8d5affc5","Type":"ContainerStarted","Data":"33a4c7d61f9be2b30009ffa950736a7d6b58554233d1029e38870c5959381932"}
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.027617 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-mtbhx"
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.042635 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-kfnbp" event={"ID":"2e6fefac-bf85-4f28-a30d-808e83a13141","Type":"ContainerStarted","Data":"ca199da85d4bfdd3168e9cb9463e1cc5cc6ca49d9e3ef9f15cfd22e988a5ea5f"}
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.045715 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-kfnbp"
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.046610 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-zgnj9" event={"ID":"3b75e9d6-b3a1-46ec-ae83-830583970e9c","Type":"ContainerStarted","Data":"c42c232baf00da346bbee8e97f4a5367891b006ed6debee20446d6e50321c3d7"}
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.048478 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-zgnj9"
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.077928 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-gfl6d" podStartSLOduration=2.953178604 podStartE2EDuration="28.077908932s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:17.022956141 +0000 UTC m=+819.237906975" lastFinishedPulling="2025-12-13 06:59:42.14768647 +0000 UTC m=+844.362637303" observedRunningTime="2025-12-13 06:59:43.05566828 +0000 UTC m=+845.270619114" watchObservedRunningTime="2025-12-13 06:59:43.077908932 +0000 UTC m=+845.292859765"
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.078954 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-vgdnd" event={"ID":"d143ef34-f1db-411d-941b-c229888e22b2","Type":"ContainerStarted","Data":"8faf2e0aeb1754e7fb6daad2a658d5a40ef444545653af9d25bf1066752265ae"}
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.079726 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-vgdnd"
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.081695 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w" podStartSLOduration=18.328821062 podStartE2EDuration="28.081684128s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:32.370316512 +0000 UTC m=+834.585267345" lastFinishedPulling="2025-12-13 06:59:42.123179578 +0000 UTC m=+844.338130411" observedRunningTime="2025-12-13 06:59:43.078926133 +0000 UTC m=+845.293876967" watchObservedRunningTime="2025-12-13 06:59:43.081684128 +0000 UTC m=+845.296634961"
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.088571 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-jfdpn" event={"ID":"5e5582c5-50c3-4c4f-9693-16f2a71543ce","Type":"ContainerStarted","Data":"3147d8f7299eda63caca789664b051158acf5919b00e82d0bbef8af76973d60c"}
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.089100 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-jfdpn"
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.109379 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-58rgg" podStartSLOduration=3.024359831 podStartE2EDuration="28.109361051s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:17.014293776 +0000 UTC m=+819.229244609" lastFinishedPulling="2025-12-13 06:59:42.099294995 +0000 UTC m=+844.314245829" observedRunningTime="2025-12-13 06:59:43.108801057 +0000 UTC m=+845.323751890" watchObservedRunningTime="2025-12-13 06:59:43.109361051 +0000 UTC m=+845.324311883"
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.129453 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-kfnbp" podStartSLOduration=3.038566698 podStartE2EDuration="28.129421543s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:17.009453216 +0000 UTC m=+819.224404049" lastFinishedPulling="2025-12-13 06:59:42.10030806 +0000 UTC m=+844.315258894" observedRunningTime="2025-12-13 06:59:43.127966046 +0000 UTC m=+845.342916879" watchObservedRunningTime="2025-12-13 06:59:43.129421543 +0000 UTC m=+845.344372376"
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.148678 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-zgnj9" podStartSLOduration=3.029802582 podStartE2EDuration="28.148657936s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:17.0073432 +0000 UTC m=+819.222294032" lastFinishedPulling="2025-12-13 06:59:42.126198554 +0000 UTC m=+844.341149386" observedRunningTime="2025-12-13 06:59:43.147257914 +0000 UTC m=+845.362208747" watchObservedRunningTime="2025-12-13 06:59:43.148657936 +0000 UTC m=+845.363608769"
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.213496 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx" podStartSLOduration=18.534756751 podStartE2EDuration="28.213480489s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:32.42263096 +0000 UTC m=+834.637581794" lastFinishedPulling="2025-12-13 06:59:42.101354699 +0000 UTC m=+844.316305532" observedRunningTime="2025-12-13 06:59:43.208838963 +0000 UTC m=+845.423789796" watchObservedRunningTime="2025-12-13 06:59:43.213480489 +0000 UTC m=+845.428431323"
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.235799 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-mtbhx" podStartSLOduration=3.8687275 podStartE2EDuration="28.235780633s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:17.009332289 +0000 UTC m=+819.224283122" lastFinishedPulling="2025-12-13 06:59:41.376385423 +0000 UTC m=+843.591336255" observedRunningTime="2025-12-13 06:59:43.234813335 +0000 UTC m=+845.449764167" watchObservedRunningTime="2025-12-13 06:59:43.235780633 +0000 UTC m=+845.450731465"
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.272694 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-jfdpn" podStartSLOduration=3.155207508 podStartE2EDuration="28.272678008s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:17.00915732 +0000 UTC m=+819.224108153" lastFinishedPulling="2025-12-13 06:59:42.12662782 +0000 UTC m=+844.341578653" observedRunningTime="2025-12-13 06:59:43.270029258 +0000 UTC m=+845.484980091" watchObservedRunningTime="2025-12-13 06:59:43.272678008 +0000 UTC m=+845.487628841"
Dec 13 06:59:43 crc kubenswrapper[4644]: I1213 06:59:43.295205 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-vgdnd" podStartSLOduration=3.204296764 podStartE2EDuration="28.295186883s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="2025-12-13 06:59:17.008392062 +0000 UTC m=+819.223342895" lastFinishedPulling="2025-12-13 06:59:42.099282181 +0000 UTC m=+844.314233014" observedRunningTime="2025-12-13 06:59:43.292842025 +0000 UTC m=+845.507792858" watchObservedRunningTime="2025-12-13 06:59:43.295186883 +0000 UTC m=+845.510137716"
Dec 13 06:59:44 crc kubenswrapper[4644]: I1213 06:59:44.094590 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w"
Dec 13 06:59:47 crc kubenswrapper[4644]: I1213 06:59:47.797101 4644
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" Dec 13 06:59:47 crc kubenswrapper[4644]: I1213 06:59:47.802721 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/48353918-3568-4a9c-a5d2-709fb831ee75-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-q2xrp\" (UID: \"48353918-3568-4a9c-a5d2-709fb831ee75\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" Dec 13 06:59:48 crc kubenswrapper[4644]: I1213 06:59:48.101126 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-d7t4k" Dec 13 06:59:48 crc kubenswrapper[4644]: I1213 06:59:48.109270 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" Dec 13 06:59:48 crc kubenswrapper[4644]: I1213 06:59:48.459128 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp"] Dec 13 06:59:48 crc kubenswrapper[4644]: W1213 06:59:48.464105 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod48353918_3568_4a9c_a5d2_709fb831ee75.slice/crio-b5140eefeb22010f84d7354396ef1916b2c39298a1b3bb0dfcf929c0b91c3dad WatchSource:0}: Error finding container b5140eefeb22010f84d7354396ef1916b2c39298a1b3bb0dfcf929c0b91c3dad: Status 404 returned error can't find the container with id b5140eefeb22010f84d7354396ef1916b2c39298a1b3bb0dfcf929c0b91c3dad Dec 13 06:59:49 crc kubenswrapper[4644]: I1213 06:59:49.122203 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" event={"ID":"48353918-3568-4a9c-a5d2-709fb831ee75","Type":"ContainerStarted","Data":"cc1e97875227a67ff39f234f493c7d589cd93eaa6f40eff1de511e619f2e7637"} Dec 13 06:59:49 crc kubenswrapper[4644]: I1213 06:59:49.122407 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" Dec 13 06:59:49 crc kubenswrapper[4644]: I1213 06:59:49.122421 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" event={"ID":"48353918-3568-4a9c-a5d2-709fb831ee75","Type":"ContainerStarted","Data":"b5140eefeb22010f84d7354396ef1916b2c39298a1b3bb0dfcf929c0b91c3dad"} Dec 13 06:59:49 crc kubenswrapper[4644]: I1213 06:59:49.142850 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" podStartSLOduration=34.142837263 podStartE2EDuration="34.142837263s" podCreationTimestamp="2025-12-13 06:59:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 06:59:49.140428505 +0000 UTC m=+851.355379337" watchObservedRunningTime="2025-12-13 06:59:49.142837263 +0000 UTC m=+851.357788096" Dec 13 06:59:51 crc kubenswrapper[4644]: I1213 
06:59:51.544683 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-58944d7758-4p77w" Dec 13 06:59:51 crc kubenswrapper[4644]: I1213 06:59:51.842588 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b544qprx" Dec 13 06:59:55 crc kubenswrapper[4644]: I1213 06:59:55.705789 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-jfdpn" Dec 13 06:59:55 crc kubenswrapper[4644]: I1213 06:59:55.786062 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-58rgg" Dec 13 06:59:55 crc kubenswrapper[4644]: I1213 06:59:55.794623 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-vgdnd" Dec 13 06:59:55 crc kubenswrapper[4644]: I1213 06:59:55.989481 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-mtbhx" Dec 13 06:59:56 crc kubenswrapper[4644]: I1213 06:59:56.010990 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-kfnbp" Dec 13 06:59:56 crc kubenswrapper[4644]: I1213 06:59:56.029357 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-bvc6f" Dec 13 06:59:56 crc kubenswrapper[4644]: I1213 06:59:56.284305 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-zgnj9" Dec 13 06:59:58 crc kubenswrapper[4644]: I1213 06:59:58.116589 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-q2xrp" Dec 13 07:00:00 crc kubenswrapper[4644]: I1213 07:00:00.156128 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5"] Dec 13 07:00:00 crc kubenswrapper[4644]: I1213 07:00:00.157504 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5" Dec 13 07:00:00 crc kubenswrapper[4644]: I1213 07:00:00.159377 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 13 07:00:00 crc kubenswrapper[4644]: I1213 07:00:00.160020 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 13 07:00:00 crc kubenswrapper[4644]: I1213 07:00:00.166827 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5"] Dec 13 07:00:00 crc kubenswrapper[4644]: I1213 07:00:00.261519 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e5596a26-06a8-4a58-941e-704b097d00d1-secret-volume\") pod \"collect-profiles-29426820-vnzd5\" (UID: \"e5596a26-06a8-4a58-941e-704b097d00d1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5" Dec 13 07:00:00 crc kubenswrapper[4644]: I1213 07:00:00.261697 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e5596a26-06a8-4a58-941e-704b097d00d1-config-volume\") pod \"collect-profiles-29426820-vnzd5\" (UID: \"e5596a26-06a8-4a58-941e-704b097d00d1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5" Dec 13 07:00:00 crc kubenswrapper[4644]: I1213 07:00:00.261770 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27jh8\" (UniqueName: \"kubernetes.io/projected/e5596a26-06a8-4a58-941e-704b097d00d1-kube-api-access-27jh8\") pod \"collect-profiles-29426820-vnzd5\" (UID: \"e5596a26-06a8-4a58-941e-704b097d00d1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5" Dec 13 07:00:00 crc kubenswrapper[4644]: I1213 07:00:00.363154 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e5596a26-06a8-4a58-941e-704b097d00d1-secret-volume\") pod \"collect-profiles-29426820-vnzd5\" (UID: \"e5596a26-06a8-4a58-941e-704b097d00d1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5" Dec 13 07:00:00 crc kubenswrapper[4644]: I1213 07:00:00.363244 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e5596a26-06a8-4a58-941e-704b097d00d1-config-volume\") pod \"collect-profiles-29426820-vnzd5\" (UID: \"e5596a26-06a8-4a58-941e-704b097d00d1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5" Dec 13 07:00:00 crc kubenswrapper[4644]: I1213 07:00:00.363302 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27jh8\" (UniqueName: \"kubernetes.io/projected/e5596a26-06a8-4a58-941e-704b097d00d1-kube-api-access-27jh8\") pod \"collect-profiles-29426820-vnzd5\" (UID: \"e5596a26-06a8-4a58-941e-704b097d00d1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5" Dec 13 07:00:00 crc kubenswrapper[4644]: I1213 07:00:00.364064 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e5596a26-06a8-4a58-941e-704b097d00d1-config-volume\") pod 
\"collect-profiles-29426820-vnzd5\" (UID: \"e5596a26-06a8-4a58-941e-704b097d00d1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5" Dec 13 07:00:00 crc kubenswrapper[4644]: I1213 07:00:00.367688 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e5596a26-06a8-4a58-941e-704b097d00d1-secret-volume\") pod \"collect-profiles-29426820-vnzd5\" (UID: \"e5596a26-06a8-4a58-941e-704b097d00d1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5" Dec 13 07:00:00 crc kubenswrapper[4644]: I1213 07:00:00.375523 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27jh8\" (UniqueName: \"kubernetes.io/projected/e5596a26-06a8-4a58-941e-704b097d00d1-kube-api-access-27jh8\") pod \"collect-profiles-29426820-vnzd5\" (UID: \"e5596a26-06a8-4a58-941e-704b097d00d1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5" Dec 13 07:00:00 crc kubenswrapper[4644]: I1213 07:00:00.477796 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5" Dec 13 07:00:00 crc kubenswrapper[4644]: I1213 07:00:00.839054 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5"] Dec 13 07:00:01 crc kubenswrapper[4644]: I1213 07:00:01.197639 4644 generic.go:334] "Generic (PLEG): container finished" podID="e5596a26-06a8-4a58-941e-704b097d00d1" containerID="f53844b783500d348cc7a5ca2a3b8b27f9e718c2a1632d65ed7835d29d581b8b" exitCode=0 Dec 13 07:00:01 crc kubenswrapper[4644]: I1213 07:00:01.197879 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5" event={"ID":"e5596a26-06a8-4a58-941e-704b097d00d1","Type":"ContainerDied","Data":"f53844b783500d348cc7a5ca2a3b8b27f9e718c2a1632d65ed7835d29d581b8b"} Dec 13 07:00:01 crc kubenswrapper[4644]: I1213 07:00:01.197906 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5" event={"ID":"e5596a26-06a8-4a58-941e-704b097d00d1","Type":"ContainerStarted","Data":"22bee75b23da87c48776a24d5b2c0dd2058f12227922143cebcbf90003ff4db3"} Dec 13 07:00:02 crc kubenswrapper[4644]: I1213 07:00:02.396733 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5" Dec 13 07:00:02 crc kubenswrapper[4644]: I1213 07:00:02.494808 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e5596a26-06a8-4a58-941e-704b097d00d1-secret-volume\") pod \"e5596a26-06a8-4a58-941e-704b097d00d1\" (UID: \"e5596a26-06a8-4a58-941e-704b097d00d1\") " Dec 13 07:00:02 crc kubenswrapper[4644]: I1213 07:00:02.495168 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27jh8\" (UniqueName: \"kubernetes.io/projected/e5596a26-06a8-4a58-941e-704b097d00d1-kube-api-access-27jh8\") pod \"e5596a26-06a8-4a58-941e-704b097d00d1\" (UID: \"e5596a26-06a8-4a58-941e-704b097d00d1\") " Dec 13 07:00:02 crc kubenswrapper[4644]: I1213 07:00:02.495340 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e5596a26-06a8-4a58-941e-704b097d00d1-config-volume\") pod \"e5596a26-06a8-4a58-941e-704b097d00d1\" (UID: \"e5596a26-06a8-4a58-941e-704b097d00d1\") " Dec 13 07:00:02 crc kubenswrapper[4644]: I1213 07:00:02.496012 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5596a26-06a8-4a58-941e-704b097d00d1-config-volume" (OuterVolumeSpecName: "config-volume") pod "e5596a26-06a8-4a58-941e-704b097d00d1" (UID: "e5596a26-06a8-4a58-941e-704b097d00d1"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:00:02 crc kubenswrapper[4644]: I1213 07:00:02.496759 4644 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e5596a26-06a8-4a58-941e-704b097d00d1-config-volume\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:02 crc kubenswrapper[4644]: I1213 07:00:02.500264 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5596a26-06a8-4a58-941e-704b097d00d1-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e5596a26-06a8-4a58-941e-704b097d00d1" (UID: "e5596a26-06a8-4a58-941e-704b097d00d1"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:00:02 crc kubenswrapper[4644]: I1213 07:00:02.500549 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5596a26-06a8-4a58-941e-704b097d00d1-kube-api-access-27jh8" (OuterVolumeSpecName: "kube-api-access-27jh8") pod "e5596a26-06a8-4a58-941e-704b097d00d1" (UID: "e5596a26-06a8-4a58-941e-704b097d00d1"). InnerVolumeSpecName "kube-api-access-27jh8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:00:02 crc kubenswrapper[4644]: I1213 07:00:02.597766 4644 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e5596a26-06a8-4a58-941e-704b097d00d1-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:02 crc kubenswrapper[4644]: I1213 07:00:02.597805 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27jh8\" (UniqueName: \"kubernetes.io/projected/e5596a26-06a8-4a58-941e-704b097d00d1-kube-api-access-27jh8\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:03 crc kubenswrapper[4644]: I1213 07:00:03.216498 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5" event={"ID":"e5596a26-06a8-4a58-941e-704b097d00d1","Type":"ContainerDied","Data":"22bee75b23da87c48776a24d5b2c0dd2058f12227922143cebcbf90003ff4db3"} Dec 13 07:00:03 crc kubenswrapper[4644]: I1213 07:00:03.216581 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22bee75b23da87c48776a24d5b2c0dd2058f12227922143cebcbf90003ff4db3" Dec 13 07:00:03 crc kubenswrapper[4644]: I1213 07:00:03.216529 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-vnzd5" Dec 13 07:00:09 crc kubenswrapper[4644]: I1213 07:00:09.753436 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:00:09 crc kubenswrapper[4644]: I1213 07:00:09.753883 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:00:10 crc kubenswrapper[4644]: I1213 07:00:10.884765 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-wdkj8"] Dec 13 07:00:10 crc kubenswrapper[4644]: E1213 07:00:10.885345 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5596a26-06a8-4a58-941e-704b097d00d1" containerName="collect-profiles" Dec 13 07:00:10 crc kubenswrapper[4644]: I1213 07:00:10.885359 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5596a26-06a8-4a58-941e-704b097d00d1" containerName="collect-profiles" Dec 13 07:00:10 crc kubenswrapper[4644]: I1213 07:00:10.885523 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5596a26-06a8-4a58-941e-704b097d00d1" containerName="collect-profiles" Dec 13 07:00:10 crc kubenswrapper[4644]: I1213 07:00:10.886317 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-wdkj8" Dec 13 07:00:10 crc kubenswrapper[4644]: I1213 07:00:10.888329 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 13 07:00:10 crc kubenswrapper[4644]: I1213 07:00:10.888990 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-h24fg" Dec 13 07:00:10 crc kubenswrapper[4644]: I1213 07:00:10.889217 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 13 07:00:10 crc kubenswrapper[4644]: I1213 07:00:10.891276 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 13 07:00:10 crc kubenswrapper[4644]: I1213 07:00:10.896409 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-wdkj8"] Dec 13 07:00:10 crc kubenswrapper[4644]: I1213 07:00:10.929238 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bcb1452-900e-4d5f-bd69-65d50257cd6c-config\") pod \"dnsmasq-dns-84bb9d8bd9-wdkj8\" (UID: \"1bcb1452-900e-4d5f-bd69-65d50257cd6c\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-wdkj8" Dec 13 07:00:10 crc kubenswrapper[4644]: I1213 07:00:10.929315 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlknb\" (UniqueName: \"kubernetes.io/projected/1bcb1452-900e-4d5f-bd69-65d50257cd6c-kube-api-access-jlknb\") pod \"dnsmasq-dns-84bb9d8bd9-wdkj8\" (UID: \"1bcb1452-900e-4d5f-bd69-65d50257cd6c\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-wdkj8" Dec 13 07:00:10 crc kubenswrapper[4644]: I1213 07:00:10.939436 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-tfctb"] Dec 13 07:00:10 crc kubenswrapper[4644]: I1213 07:00:10.941045 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-tfctb" Dec 13 07:00:10 crc kubenswrapper[4644]: I1213 07:00:10.943263 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 13 07:00:10 crc kubenswrapper[4644]: I1213 07:00:10.954229 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-tfctb"] Dec 13 07:00:11 crc kubenswrapper[4644]: I1213 07:00:11.030323 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlknb\" (UniqueName: \"kubernetes.io/projected/1bcb1452-900e-4d5f-bd69-65d50257cd6c-kube-api-access-jlknb\") pod \"dnsmasq-dns-84bb9d8bd9-wdkj8\" (UID: \"1bcb1452-900e-4d5f-bd69-65d50257cd6c\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-wdkj8" Dec 13 07:00:11 crc kubenswrapper[4644]: I1213 07:00:11.030578 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7a4d1d4-1933-45f7-a100-52315335b741-config\") pod \"dnsmasq-dns-5f854695bc-tfctb\" (UID: \"b7a4d1d4-1933-45f7-a100-52315335b741\") " pod="openstack/dnsmasq-dns-5f854695bc-tfctb" Dec 13 07:00:11 crc kubenswrapper[4644]: I1213 07:00:11.030609 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7a4d1d4-1933-45f7-a100-52315335b741-dns-svc\") pod \"dnsmasq-dns-5f854695bc-tfctb\" (UID: \"b7a4d1d4-1933-45f7-a100-52315335b741\") " pod="openstack/dnsmasq-dns-5f854695bc-tfctb" Dec 13 07:00:11 crc kubenswrapper[4644]: I1213 07:00:11.030631 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hn6ln\" (UniqueName: \"kubernetes.io/projected/b7a4d1d4-1933-45f7-a100-52315335b741-kube-api-access-hn6ln\") pod \"dnsmasq-dns-5f854695bc-tfctb\" (UID: \"b7a4d1d4-1933-45f7-a100-52315335b741\") " pod="openstack/dnsmasq-dns-5f854695bc-tfctb" Dec 13 07:00:11 crc kubenswrapper[4644]: I1213 07:00:11.030660 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bcb1452-900e-4d5f-bd69-65d50257cd6c-config\") pod \"dnsmasq-dns-84bb9d8bd9-wdkj8\" (UID: \"1bcb1452-900e-4d5f-bd69-65d50257cd6c\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-wdkj8" Dec 13 07:00:11 crc kubenswrapper[4644]: I1213 07:00:11.031540 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bcb1452-900e-4d5f-bd69-65d50257cd6c-config\") pod \"dnsmasq-dns-84bb9d8bd9-wdkj8\" (UID: \"1bcb1452-900e-4d5f-bd69-65d50257cd6c\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-wdkj8" Dec 13 07:00:11 crc kubenswrapper[4644]: I1213 07:00:11.049173 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlknb\" (UniqueName: \"kubernetes.io/projected/1bcb1452-900e-4d5f-bd69-65d50257cd6c-kube-api-access-jlknb\") pod \"dnsmasq-dns-84bb9d8bd9-wdkj8\" (UID: \"1bcb1452-900e-4d5f-bd69-65d50257cd6c\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-wdkj8" Dec 13 07:00:11 crc kubenswrapper[4644]: I1213 07:00:11.132866 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7a4d1d4-1933-45f7-a100-52315335b741-config\") pod \"dnsmasq-dns-5f854695bc-tfctb\" (UID: \"b7a4d1d4-1933-45f7-a100-52315335b741\") " pod="openstack/dnsmasq-dns-5f854695bc-tfctb" Dec 13 07:00:11 crc kubenswrapper[4644]: I1213 
07:00:11.132928 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7a4d1d4-1933-45f7-a100-52315335b741-dns-svc\") pod \"dnsmasq-dns-5f854695bc-tfctb\" (UID: \"b7a4d1d4-1933-45f7-a100-52315335b741\") " pod="openstack/dnsmasq-dns-5f854695bc-tfctb" Dec 13 07:00:11 crc kubenswrapper[4644]: I1213 07:00:11.132957 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hn6ln\" (UniqueName: \"kubernetes.io/projected/b7a4d1d4-1933-45f7-a100-52315335b741-kube-api-access-hn6ln\") pod \"dnsmasq-dns-5f854695bc-tfctb\" (UID: \"b7a4d1d4-1933-45f7-a100-52315335b741\") " pod="openstack/dnsmasq-dns-5f854695bc-tfctb" Dec 13 07:00:11 crc kubenswrapper[4644]: I1213 07:00:11.134235 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7a4d1d4-1933-45f7-a100-52315335b741-config\") pod \"dnsmasq-dns-5f854695bc-tfctb\" (UID: \"b7a4d1d4-1933-45f7-a100-52315335b741\") " pod="openstack/dnsmasq-dns-5f854695bc-tfctb" Dec 13 07:00:11 crc kubenswrapper[4644]: I1213 07:00:11.134428 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7a4d1d4-1933-45f7-a100-52315335b741-dns-svc\") pod \"dnsmasq-dns-5f854695bc-tfctb\" (UID: \"b7a4d1d4-1933-45f7-a100-52315335b741\") " pod="openstack/dnsmasq-dns-5f854695bc-tfctb" Dec 13 07:00:11 crc kubenswrapper[4644]: I1213 07:00:11.149708 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hn6ln\" (UniqueName: \"kubernetes.io/projected/b7a4d1d4-1933-45f7-a100-52315335b741-kube-api-access-hn6ln\") pod \"dnsmasq-dns-5f854695bc-tfctb\" (UID: \"b7a4d1d4-1933-45f7-a100-52315335b741\") " pod="openstack/dnsmasq-dns-5f854695bc-tfctb" Dec 13 07:00:11 crc kubenswrapper[4644]: I1213 07:00:11.201848 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-wdkj8" Dec 13 07:00:11 crc kubenswrapper[4644]: I1213 07:00:11.255919 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-tfctb" Dec 13 07:00:11 crc kubenswrapper[4644]: I1213 07:00:11.600423 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-wdkj8"] Dec 13 07:00:11 crc kubenswrapper[4644]: I1213 07:00:11.664905 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-tfctb"] Dec 13 07:00:12 crc kubenswrapper[4644]: I1213 07:00:12.271146 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f854695bc-tfctb" event={"ID":"b7a4d1d4-1933-45f7-a100-52315335b741","Type":"ContainerStarted","Data":"eca6ebdfd8ed96e87e211c8590fcab9fcd452dc0078c7cbce38f90fa803179d6"} Dec 13 07:00:12 crc kubenswrapper[4644]: I1213 07:00:12.272163 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9d8bd9-wdkj8" event={"ID":"1bcb1452-900e-4d5f-bd69-65d50257cd6c","Type":"ContainerStarted","Data":"e0b7e2849697ede4d5b76f1b86e8aa20b871ba4d3636c470ab9e56e8463355d8"} Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.123149 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-tfctb"] Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.141788 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-45ghl"] Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.142828 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.152451 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-45ghl"] Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.285378 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-45ghl\" (UID: \"710276eb-129b-4cf2-bbc4-2a41e70c5c1a\") " pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.285521 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-config\") pod \"dnsmasq-dns-744ffd65bc-45ghl\" (UID: \"710276eb-129b-4cf2-bbc4-2a41e70c5c1a\") " pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.285557 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5chk\" (UniqueName: \"kubernetes.io/projected/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-kube-api-access-q5chk\") pod \"dnsmasq-dns-744ffd65bc-45ghl\" (UID: \"710276eb-129b-4cf2-bbc4-2a41e70c5c1a\") " pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.388302 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-config\") pod \"dnsmasq-dns-744ffd65bc-45ghl\" (UID: \"710276eb-129b-4cf2-bbc4-2a41e70c5c1a\") " pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.388372 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5chk\" (UniqueName: \"kubernetes.io/projected/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-kube-api-access-q5chk\") 
pod \"dnsmasq-dns-744ffd65bc-45ghl\" (UID: \"710276eb-129b-4cf2-bbc4-2a41e70c5c1a\") " pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.388544 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-45ghl\" (UID: \"710276eb-129b-4cf2-bbc4-2a41e70c5c1a\") " pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.390010 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-45ghl\" (UID: \"710276eb-129b-4cf2-bbc4-2a41e70c5c1a\") " pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.390771 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-config\") pod \"dnsmasq-dns-744ffd65bc-45ghl\" (UID: \"710276eb-129b-4cf2-bbc4-2a41e70c5c1a\") " pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.445626 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-wdkj8"] Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.450260 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-6gkc2"] Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.451545 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.453202 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5chk\" (UniqueName: \"kubernetes.io/projected/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-kube-api-access-q5chk\") pod \"dnsmasq-dns-744ffd65bc-45ghl\" (UID: \"710276eb-129b-4cf2-bbc4-2a41e70c5c1a\") " pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.460793 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.463906 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-6gkc2"] Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.491429 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6331da5-ec97-4e23-8a5d-a9977be81c72-config\") pod \"dnsmasq-dns-95f5f6995-6gkc2\" (UID: \"a6331da5-ec97-4e23-8a5d-a9977be81c72\") " pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.491577 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6331da5-ec97-4e23-8a5d-a9977be81c72-dns-svc\") pod \"dnsmasq-dns-95f5f6995-6gkc2\" (UID: \"a6331da5-ec97-4e23-8a5d-a9977be81c72\") " pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.491750 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk9rj\" (UniqueName: \"kubernetes.io/projected/a6331da5-ec97-4e23-8a5d-a9977be81c72-kube-api-access-zk9rj\") pod \"dnsmasq-dns-95f5f6995-6gkc2\" (UID: \"a6331da5-ec97-4e23-8a5d-a9977be81c72\") " pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.594792 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk9rj\" (UniqueName: \"kubernetes.io/projected/a6331da5-ec97-4e23-8a5d-a9977be81c72-kube-api-access-zk9rj\") pod \"dnsmasq-dns-95f5f6995-6gkc2\" (UID: \"a6331da5-ec97-4e23-8a5d-a9977be81c72\") " pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.595288 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6331da5-ec97-4e23-8a5d-a9977be81c72-config\") pod \"dnsmasq-dns-95f5f6995-6gkc2\" (UID: \"a6331da5-ec97-4e23-8a5d-a9977be81c72\") " pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.595353 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6331da5-ec97-4e23-8a5d-a9977be81c72-dns-svc\") pod \"dnsmasq-dns-95f5f6995-6gkc2\" (UID: \"a6331da5-ec97-4e23-8a5d-a9977be81c72\") " pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.597865 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6331da5-ec97-4e23-8a5d-a9977be81c72-config\") pod \"dnsmasq-dns-95f5f6995-6gkc2\" (UID: \"a6331da5-ec97-4e23-8a5d-a9977be81c72\") " pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.598124 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6331da5-ec97-4e23-8a5d-a9977be81c72-dns-svc\") pod \"dnsmasq-dns-95f5f6995-6gkc2\" (UID: \"a6331da5-ec97-4e23-8a5d-a9977be81c72\") " pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.620301 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk9rj\" (UniqueName: 
\"kubernetes.io/projected/a6331da5-ec97-4e23-8a5d-a9977be81c72-kube-api-access-zk9rj\") pod \"dnsmasq-dns-95f5f6995-6gkc2\" (UID: \"a6331da5-ec97-4e23-8a5d-a9977be81c72\") " pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" Dec 13 07:00:14 crc kubenswrapper[4644]: I1213 07:00:14.785953 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.290704 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.292716 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.295640 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.295718 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-xw827" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.295907 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.296143 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.297543 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.298696 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.302890 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.318619 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.407567 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.407719 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-config-data\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.407756 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.407780 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " 
pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.407838 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.408099 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkc2n\" (UniqueName: \"kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-kube-api-access-xkc2n\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.408214 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.408308 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.408791 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.408821 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.408959 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.510141 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.510194 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.510232 4644 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-xkc2n\" (UniqueName: \"kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-kube-api-access-xkc2n\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.510252 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.510286 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.510308 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.510323 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.510353 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.510385 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.511414 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-config-data\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.511480 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.513477 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: 
\"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.513703 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.513701 4644 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.514183 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-config-data\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.514310 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.514551 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.516717 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.517152 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.517684 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.518240 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.528615 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkc2n\" (UniqueName: 
\"kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-kube-api-access-xkc2n\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.532193 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.588012 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.589071 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.593089 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.596018 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.596262 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.596420 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.596636 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.596773 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.596953 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-mxxlr" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.609224 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.616897 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.714151 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.714197 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.714229 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.714260 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.714316 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.714356 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.714386 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.714470 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74lzh\" (UniqueName: \"kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-kube-api-access-74lzh\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.714487 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-plugins-conf\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.714500 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.714540 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.815823 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.816065 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.816125 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.816168 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.816200 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.816274 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.816289 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 
07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.816303 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74lzh\" (UniqueName: \"kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-kube-api-access-74lzh\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.816347 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.816368 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.816394 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.816593 4644 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.816949 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.817633 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.817940 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.820139 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.820877 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" 
(UniqueName: \"kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.823807 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.824202 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.835055 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.835243 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.837637 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.838013 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74lzh\" (UniqueName: \"kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-kube-api-access-74lzh\") pod \"rabbitmq-cell1-server-0\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:15 crc kubenswrapper[4644]: I1213 07:00:15.914589 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:00:16 crc kubenswrapper[4644]: I1213 07:00:16.800291 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 13 07:00:16 crc kubenswrapper[4644]: I1213 07:00:16.801599 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 13 07:00:16 crc kubenswrapper[4644]: I1213 07:00:16.805434 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 13 07:00:16 crc kubenswrapper[4644]: I1213 07:00:16.805741 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 13 07:00:16 crc kubenswrapper[4644]: I1213 07:00:16.806003 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 13 07:00:16 crc kubenswrapper[4644]: I1213 07:00:16.813923 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 13 07:00:16 crc kubenswrapper[4644]: I1213 07:00:16.814654 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-lsdlv" Dec 13 07:00:16 crc kubenswrapper[4644]: I1213 07:00:16.820822 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 13 07:00:16 crc kubenswrapper[4644]: I1213 07:00:16.936940 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lm5l\" (UniqueName: \"kubernetes.io/projected/8a194627-4be2-4f29-ab01-c84d89070739-kube-api-access-6lm5l\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:16 crc kubenswrapper[4644]: I1213 07:00:16.937053 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8a194627-4be2-4f29-ab01-c84d89070739-config-data-default\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:16 crc kubenswrapper[4644]: I1213 07:00:16.937082 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8a194627-4be2-4f29-ab01-c84d89070739-kolla-config\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:16 crc kubenswrapper[4644]: I1213 07:00:16.937138 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:16 crc kubenswrapper[4644]: I1213 07:00:16.937159 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a194627-4be2-4f29-ab01-c84d89070739-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:16 crc kubenswrapper[4644]: I1213 07:00:16.937183 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8a194627-4be2-4f29-ab01-c84d89070739-config-data-generated\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:16 crc kubenswrapper[4644]: I1213 07:00:16.937343 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a194627-4be2-4f29-ab01-c84d89070739-operator-scripts\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:16 crc kubenswrapper[4644]: I1213 07:00:16.937367 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a194627-4be2-4f29-ab01-c84d89070739-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.038844 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a194627-4be2-4f29-ab01-c84d89070739-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.038902 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8a194627-4be2-4f29-ab01-c84d89070739-config-data-generated\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.038951 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a194627-4be2-4f29-ab01-c84d89070739-operator-scripts\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.038970 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a194627-4be2-4f29-ab01-c84d89070739-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.039107 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lm5l\" (UniqueName: \"kubernetes.io/projected/8a194627-4be2-4f29-ab01-c84d89070739-kube-api-access-6lm5l\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.039142 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8a194627-4be2-4f29-ab01-c84d89070739-config-data-default\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.039162 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8a194627-4be2-4f29-ab01-c84d89070739-kolla-config\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.039198 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: 
\"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.039407 4644 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.043977 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8a194627-4be2-4f29-ab01-c84d89070739-config-data-generated\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.044465 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8a194627-4be2-4f29-ab01-c84d89070739-config-data-default\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.045884 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a194627-4be2-4f29-ab01-c84d89070739-operator-scripts\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.048974 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a194627-4be2-4f29-ab01-c84d89070739-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.054042 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8a194627-4be2-4f29-ab01-c84d89070739-kolla-config\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.059583 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.060014 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lm5l\" (UniqueName: \"kubernetes.io/projected/8a194627-4be2-4f29-ab01-c84d89070739-kube-api-access-6lm5l\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.062952 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a194627-4be2-4f29-ab01-c84d89070739-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"8a194627-4be2-4f29-ab01-c84d89070739\") " pod="openstack/openstack-galera-0" Dec 13 07:00:17 crc kubenswrapper[4644]: I1213 07:00:17.124941 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.222720 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.224071 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.230771 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.231228 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.232074 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.232557 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-fw4fd" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.234574 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.366342 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8e96f9f-4c53-4df2-b818-c3341709594f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.366407 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e8e96f9f-4c53-4df2-b818-c3341709594f-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.366429 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e8e96f9f-4c53-4df2-b818-c3341709594f-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.366513 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.366583 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e8e96f9f-4c53-4df2-b818-c3341709594f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.366599 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8e96f9f-4c53-4df2-b818-c3341709594f-galera-tls-certs\") pod 
\"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.366633 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fl4wj\" (UniqueName: \"kubernetes.io/projected/e8e96f9f-4c53-4df2-b818-c3341709594f-kube-api-access-fl4wj\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.366704 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e8e96f9f-4c53-4df2-b818-c3341709594f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.467722 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e8e96f9f-4c53-4df2-b818-c3341709594f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.467768 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8e96f9f-4c53-4df2-b818-c3341709594f-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.467811 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fl4wj\" (UniqueName: \"kubernetes.io/projected/e8e96f9f-4c53-4df2-b818-c3341709594f-kube-api-access-fl4wj\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.467863 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e8e96f9f-4c53-4df2-b818-c3341709594f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.467927 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8e96f9f-4c53-4df2-b818-c3341709594f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.467960 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e8e96f9f-4c53-4df2-b818-c3341709594f-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.467980 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e8e96f9f-4c53-4df2-b818-c3341709594f-config-data-default\") pod 
\"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.468044 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.468284 4644 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.469037 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e8e96f9f-4c53-4df2-b818-c3341709594f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.470317 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e8e96f9f-4c53-4df2-b818-c3341709594f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.470370 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e8e96f9f-4c53-4df2-b818-c3341709594f-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.472889 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e8e96f9f-4c53-4df2-b818-c3341709594f-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.473931 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8e96f9f-4c53-4df2-b818-c3341709594f-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.477633 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8e96f9f-4c53-4df2-b818-c3341709594f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.486075 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.502835 
4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fl4wj\" (UniqueName: \"kubernetes.io/projected/e8e96f9f-4c53-4df2-b818-c3341709594f-kube-api-access-fl4wj\") pod \"openstack-cell1-galera-0\" (UID: \"e8e96f9f-4c53-4df2-b818-c3341709594f\") " pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.543964 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.545217 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.546217 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.548118 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.548362 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.553219 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.555648 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-ghq2n" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.674711 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-282wx\" (UniqueName: \"kubernetes.io/projected/103d51fb-7ed3-487e-819f-bbcdfb2dea86-kube-api-access-282wx\") pod \"memcached-0\" (UID: \"103d51fb-7ed3-487e-819f-bbcdfb2dea86\") " pod="openstack/memcached-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.674775 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/103d51fb-7ed3-487e-819f-bbcdfb2dea86-combined-ca-bundle\") pod \"memcached-0\" (UID: \"103d51fb-7ed3-487e-819f-bbcdfb2dea86\") " pod="openstack/memcached-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.674919 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/103d51fb-7ed3-487e-819f-bbcdfb2dea86-kolla-config\") pod \"memcached-0\" (UID: \"103d51fb-7ed3-487e-819f-bbcdfb2dea86\") " pod="openstack/memcached-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.675066 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/103d51fb-7ed3-487e-819f-bbcdfb2dea86-config-data\") pod \"memcached-0\" (UID: \"103d51fb-7ed3-487e-819f-bbcdfb2dea86\") " pod="openstack/memcached-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.675151 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/103d51fb-7ed3-487e-819f-bbcdfb2dea86-memcached-tls-certs\") pod \"memcached-0\" (UID: \"103d51fb-7ed3-487e-819f-bbcdfb2dea86\") " pod="openstack/memcached-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.778844 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/103d51fb-7ed3-487e-819f-bbcdfb2dea86-config-data\") pod \"memcached-0\" (UID: \"103d51fb-7ed3-487e-819f-bbcdfb2dea86\") " pod="openstack/memcached-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.778920 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/103d51fb-7ed3-487e-819f-bbcdfb2dea86-memcached-tls-certs\") pod \"memcached-0\" (UID: \"103d51fb-7ed3-487e-819f-bbcdfb2dea86\") " pod="openstack/memcached-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.779002 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-282wx\" (UniqueName: \"kubernetes.io/projected/103d51fb-7ed3-487e-819f-bbcdfb2dea86-kube-api-access-282wx\") pod \"memcached-0\" (UID: \"103d51fb-7ed3-487e-819f-bbcdfb2dea86\") " pod="openstack/memcached-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.779108 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/103d51fb-7ed3-487e-819f-bbcdfb2dea86-combined-ca-bundle\") pod \"memcached-0\" (UID: \"103d51fb-7ed3-487e-819f-bbcdfb2dea86\") " pod="openstack/memcached-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.779192 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/103d51fb-7ed3-487e-819f-bbcdfb2dea86-kolla-config\") pod \"memcached-0\" (UID: \"103d51fb-7ed3-487e-819f-bbcdfb2dea86\") " pod="openstack/memcached-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.779651 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/103d51fb-7ed3-487e-819f-bbcdfb2dea86-config-data\") pod \"memcached-0\" (UID: \"103d51fb-7ed3-487e-819f-bbcdfb2dea86\") " pod="openstack/memcached-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.780817 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/103d51fb-7ed3-487e-819f-bbcdfb2dea86-kolla-config\") pod \"memcached-0\" (UID: \"103d51fb-7ed3-487e-819f-bbcdfb2dea86\") " pod="openstack/memcached-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.783811 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/103d51fb-7ed3-487e-819f-bbcdfb2dea86-memcached-tls-certs\") pod \"memcached-0\" (UID: \"103d51fb-7ed3-487e-819f-bbcdfb2dea86\") " pod="openstack/memcached-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.784513 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/103d51fb-7ed3-487e-819f-bbcdfb2dea86-combined-ca-bundle\") pod \"memcached-0\" (UID: \"103d51fb-7ed3-487e-819f-bbcdfb2dea86\") " pod="openstack/memcached-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.800721 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-282wx\" (UniqueName: \"kubernetes.io/projected/103d51fb-7ed3-487e-819f-bbcdfb2dea86-kube-api-access-282wx\") pod \"memcached-0\" (UID: \"103d51fb-7ed3-487e-819f-bbcdfb2dea86\") " pod="openstack/memcached-0" Dec 13 07:00:18 crc kubenswrapper[4644]: I1213 07:00:18.871708 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 13 07:00:20 crc kubenswrapper[4644]: I1213 07:00:20.725348 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 13 07:00:20 crc kubenswrapper[4644]: I1213 07:00:20.727415 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 13 07:00:20 crc kubenswrapper[4644]: I1213 07:00:20.733817 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-w7m5t" Dec 13 07:00:20 crc kubenswrapper[4644]: I1213 07:00:20.737469 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 13 07:00:20 crc kubenswrapper[4644]: I1213 07:00:20.823322 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcx5f\" (UniqueName: \"kubernetes.io/projected/628995d0-5034-4f64-8c48-50eee052e5db-kube-api-access-hcx5f\") pod \"kube-state-metrics-0\" (UID: \"628995d0-5034-4f64-8c48-50eee052e5db\") " pod="openstack/kube-state-metrics-0" Dec 13 07:00:20 crc kubenswrapper[4644]: I1213 07:00:20.924746 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcx5f\" (UniqueName: \"kubernetes.io/projected/628995d0-5034-4f64-8c48-50eee052e5db-kube-api-access-hcx5f\") pod \"kube-state-metrics-0\" (UID: \"628995d0-5034-4f64-8c48-50eee052e5db\") " pod="openstack/kube-state-metrics-0" Dec 13 07:00:20 crc kubenswrapper[4644]: I1213 07:00:20.949931 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcx5f\" (UniqueName: \"kubernetes.io/projected/628995d0-5034-4f64-8c48-50eee052e5db-kube-api-access-hcx5f\") pod \"kube-state-metrics-0\" (UID: \"628995d0-5034-4f64-8c48-50eee052e5db\") " pod="openstack/kube-state-metrics-0" Dec 13 07:00:20 crc kubenswrapper[4644]: I1213 07:00:20.990088 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 07:00:21 crc kubenswrapper[4644]: I1213 07:00:21.060220 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 13 07:00:21 crc kubenswrapper[4644]: I1213 07:00:21.063408 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 13 07:00:21 crc kubenswrapper[4644]: I1213 07:00:21.071684 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 07:00:21 crc kubenswrapper[4644]: I1213 07:00:21.079044 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-45ghl"] Dec 13 07:00:21 crc kubenswrapper[4644]: I1213 07:00:21.169955 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 13 07:00:21 crc kubenswrapper[4644]: I1213 07:00:21.174629 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-6gkc2"] Dec 13 07:00:21 crc kubenswrapper[4644]: I1213 07:00:21.180059 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.812413 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-cb7bh"] Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.814522 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.816573 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-f54tn" Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.816922 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.828262 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-txrxh"] Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.829323 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-txrxh" Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.833815 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.834650 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-txrxh"] Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.841348 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-cb7bh"] Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.985988 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4krp\" (UniqueName: \"kubernetes.io/projected/0d73b937-2604-4e87-867b-acdc34d21e5e-kube-api-access-c4krp\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.986049 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0d73b937-2604-4e87-867b-acdc34d21e5e-scripts\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.986090 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e2e749f-da1d-4692-9479-275cd00bc4b0-var-log-ovn\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.986231 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/0d73b937-2604-4e87-867b-acdc34d21e5e-var-lib\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.986334 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e2e749f-da1d-4692-9479-275cd00bc4b0-var-run-ovn\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.986405 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5268t\" (UniqueName: \"kubernetes.io/projected/6e2e749f-da1d-4692-9479-275cd00bc4b0-kube-api-access-5268t\") pod \"ovn-controller-txrxh\" (UID: 
\"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.986755 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0d73b937-2604-4e87-867b-acdc34d21e5e-var-run\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.986822 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e2e749f-da1d-4692-9479-275cd00bc4b0-var-run\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.986844 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/0d73b937-2604-4e87-867b-acdc34d21e5e-var-log\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.986965 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/0d73b937-2604-4e87-867b-acdc34d21e5e-etc-ovs\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.987022 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e2e749f-da1d-4692-9479-275cd00bc4b0-combined-ca-bundle\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.987101 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e2e749f-da1d-4692-9479-275cd00bc4b0-ovn-controller-tls-certs\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:23 crc kubenswrapper[4644]: I1213 07:00:23.987190 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e2e749f-da1d-4692-9479-275cd00bc4b0-scripts\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.090546 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e2e749f-da1d-4692-9479-275cd00bc4b0-scripts\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.090658 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4krp\" (UniqueName: \"kubernetes.io/projected/0d73b937-2604-4e87-867b-acdc34d21e5e-kube-api-access-c4krp\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" 
Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.090736 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0d73b937-2604-4e87-867b-acdc34d21e5e-scripts\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.090769 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e2e749f-da1d-4692-9479-275cd00bc4b0-var-log-ovn\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.091461 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/0d73b937-2604-4e87-867b-acdc34d21e5e-var-lib\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.091502 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e2e749f-da1d-4692-9479-275cd00bc4b0-var-run-ovn\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.091525 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0d73b937-2604-4e87-867b-acdc34d21e5e-var-run\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.091545 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5268t\" (UniqueName: \"kubernetes.io/projected/6e2e749f-da1d-4692-9479-275cd00bc4b0-kube-api-access-5268t\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.091567 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/0d73b937-2604-4e87-867b-acdc34d21e5e-var-log\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.091581 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e2e749f-da1d-4692-9479-275cd00bc4b0-var-run\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.091640 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/0d73b937-2604-4e87-867b-acdc34d21e5e-etc-ovs\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.091689 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6e2e749f-da1d-4692-9479-275cd00bc4b0-combined-ca-bundle\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.091733 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e2e749f-da1d-4692-9479-275cd00bc4b0-ovn-controller-tls-certs\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.093013 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e2e749f-da1d-4692-9479-275cd00bc4b0-var-run-ovn\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.093115 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/0d73b937-2604-4e87-867b-acdc34d21e5e-var-lib\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.093146 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0d73b937-2604-4e87-867b-acdc34d21e5e-var-run\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.093218 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/0d73b937-2604-4e87-867b-acdc34d21e5e-var-log\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.093271 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e2e749f-da1d-4692-9479-275cd00bc4b0-var-log-ovn\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.093305 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/0d73b937-2604-4e87-867b-acdc34d21e5e-etc-ovs\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.093352 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e2e749f-da1d-4692-9479-275cd00bc4b0-var-run\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.094764 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0d73b937-2604-4e87-867b-acdc34d21e5e-scripts\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.094958 4644 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e2e749f-da1d-4692-9479-275cd00bc4b0-scripts\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.103856 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e2e749f-da1d-4692-9479-275cd00bc4b0-ovn-controller-tls-certs\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.104836 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e2e749f-da1d-4692-9479-275cd00bc4b0-combined-ca-bundle\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.107757 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4krp\" (UniqueName: \"kubernetes.io/projected/0d73b937-2604-4e87-867b-acdc34d21e5e-kube-api-access-c4krp\") pod \"ovn-controller-ovs-cb7bh\" (UID: \"0d73b937-2604-4e87-867b-acdc34d21e5e\") " pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.109912 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5268t\" (UniqueName: \"kubernetes.io/projected/6e2e749f-da1d-4692-9479-275cd00bc4b0-kube-api-access-5268t\") pod \"ovn-controller-txrxh\" (UID: \"6e2e749f-da1d-4692-9479-275cd00bc4b0\") " pod="openstack/ovn-controller-txrxh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.136488 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:24 crc kubenswrapper[4644]: I1213 07:00:24.150631 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-txrxh" Dec 13 07:00:25 crc kubenswrapper[4644]: I1213 07:00:25.435871 4644 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.043145 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.063494 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-txrxh"] Dec 13 07:00:26 crc kubenswrapper[4644]: W1213 07:00:26.096488 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e2e749f_da1d_4692_9479_275cd00bc4b0.slice/crio-2d9929d9cdb119e8e6c8b07b08c49603ccc3887f1065b04ce91e22845500bbb4 WatchSource:0}: Error finding container 2d9929d9cdb119e8e6c8b07b08c49603ccc3887f1065b04ce91e22845500bbb4: Status 404 returned error can't find the container with id 2d9929d9cdb119e8e6c8b07b08c49603ccc3887f1065b04ce91e22845500bbb4 Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.195128 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-cb7bh"] Dec 13 07:00:26 crc kubenswrapper[4644]: W1213 07:00:26.200898 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d73b937_2604_4e87_867b_acdc34d21e5e.slice/crio-85ba8a3d0b60b15f4620e64a9e34afafe890d9bf7f61794746bd09ee6e8f05b2 WatchSource:0}: Error finding container 85ba8a3d0b60b15f4620e64a9e34afafe890d9bf7f61794746bd09ee6e8f05b2: Status 404 returned error can't find the container with id 85ba8a3d0b60b15f4620e64a9e34afafe890d9bf7f61794746bd09ee6e8f05b2 Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.366291 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-dpzsf"] Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.367150 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.368723 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.369004 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.385578 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-dpzsf"] Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.390935 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8a194627-4be2-4f29-ab01-c84d89070739","Type":"ContainerStarted","Data":"9a18f8c83b43554173ca47e772061e2baaa128e3313576ab8563de87c98e4daf"} Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.426526 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0dd20500-a2dd-4608-a3c8-7d714ffb09c4","Type":"ContainerStarted","Data":"03a2bd297f67a38c7d89c909e9cbdaba5a92f8a3dcbe471c907ac364eded75db"} Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.426568 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-txrxh" event={"ID":"6e2e749f-da1d-4692-9479-275cd00bc4b0","Type":"ContainerStarted","Data":"2d9929d9cdb119e8e6c8b07b08c49603ccc3887f1065b04ce91e22845500bbb4"} Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.426580 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" event={"ID":"710276eb-129b-4cf2-bbc4-2a41e70c5c1a","Type":"ContainerStarted","Data":"5c98714e6c48e46915016628becfe385bd1b537ee77e8c61e90df3fcb9ac9468"} Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.426591 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" event={"ID":"710276eb-129b-4cf2-bbc4-2a41e70c5c1a","Type":"ContainerStarted","Data":"6829a9dc7bc7d3a63469f67aa920470995a2388f0b749712055da31e5c02aa13"} Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.427391 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"706c4700-9d13-4bac-b7ac-0c19c09cc7e7","Type":"ContainerStarted","Data":"b9120b4bf5edb948f9978d94d2a77c11f5e00d30e31116718e029c5702f6c1e7"} Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.429501 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"103d51fb-7ed3-487e-819f-bbcdfb2dea86","Type":"ContainerStarted","Data":"1173c881fdc2f6139bc6200ead87d5ffc582df9b22a8446cc9e876e98471221a"} Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.432288 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e8e96f9f-4c53-4df2-b818-c3341709594f","Type":"ContainerStarted","Data":"9307678851e9cb12c6a9a19367fc3f3ca5c6f650453d4ee2d22c6d137e4c77ee"} Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.433775 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" event={"ID":"a6331da5-ec97-4e23-8a5d-a9977be81c72","Type":"ContainerStarted","Data":"c87413c12414e7a737c8e69c610b761f48e5d3c7d88ec6b6118092959e09b26b"} Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.433797 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" 
event={"ID":"a6331da5-ec97-4e23-8a5d-a9977be81c72","Type":"ContainerStarted","Data":"356b821ab36cfae4edadc74d3813bdd04d32ef8e1501cb96b900f1d41cfa1076"} Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.439548 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-cb7bh" event={"ID":"0d73b937-2604-4e87-867b-acdc34d21e5e","Type":"ContainerStarted","Data":"85ba8a3d0b60b15f4620e64a9e34afafe890d9bf7f61794746bd09ee6e8f05b2"} Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.441583 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"628995d0-5034-4f64-8c48-50eee052e5db","Type":"ContainerStarted","Data":"5ba946bab0f6707806daeba084e2c5380d3ee3a484882f59c7a932d9b64f57fc"} Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.442844 4644 generic.go:334] "Generic (PLEG): container finished" podID="b7a4d1d4-1933-45f7-a100-52315335b741" containerID="1a2ed9d282b350e7e59b6040a83f4ecf2b370d22ca8755def1e3c3a3b822c682" exitCode=0 Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.442876 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f854695bc-tfctb" event={"ID":"b7a4d1d4-1933-45f7-a100-52315335b741","Type":"ContainerDied","Data":"1a2ed9d282b350e7e59b6040a83f4ecf2b370d22ca8755def1e3c3a3b822c682"} Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.517244 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.518919 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.522240 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.522586 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-h8gbt" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.522748 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.522900 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.553895 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-combined-ca-bundle\") pod \"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.553995 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-ovs-rundir\") pod \"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.554078 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-ovn-rundir\") pod \"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " 
pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.554168 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.554285 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-config\") pod \"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.554413 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtggg\" (UniqueName: \"kubernetes.io/projected/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-kube-api-access-jtggg\") pod \"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.576485 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.661667 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/40233f71-8156-4715-adce-ef3fee2102e2-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.661737 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/40233f71-8156-4715-adce-ef3fee2102e2-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.661794 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40233f71-8156-4715-adce-ef3fee2102e2-config\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.661898 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40233f71-8156-4715-adce-ef3fee2102e2-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.662064 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-ovn-rundir\") pod \"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.662124 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/40233f71-8156-4715-adce-ef3fee2102e2-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.662146 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-config\") pod \"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.662194 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/40233f71-8156-4715-adce-ef3fee2102e2-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.662215 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtggg\" (UniqueName: \"kubernetes.io/projected/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-kube-api-access-jtggg\") pod \"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.662338 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.662408 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlw2t\" (UniqueName: \"kubernetes.io/projected/40233f71-8156-4715-adce-ef3fee2102e2-kube-api-access-vlw2t\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.662476 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-combined-ca-bundle\") pod \"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.662524 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-ovs-rundir\") pod \"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.662598 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.662938 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-ovn-rundir\") pod 
\"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.673962 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-ovs-rundir\") pod \"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.674838 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.675015 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-config\") pod \"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.680808 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-combined-ca-bundle\") pod \"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.702230 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtggg\" (UniqueName: \"kubernetes.io/projected/1cb5b56c-b83a-4d43-b1fa-2a88580eceea-kube-api-access-jtggg\") pod \"ovn-controller-metrics-dpzsf\" (UID: \"1cb5b56c-b83a-4d43-b1fa-2a88580eceea\") " pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.730937 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-tfctb" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.764054 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/40233f71-8156-4715-adce-ef3fee2102e2-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.764117 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40233f71-8156-4715-adce-ef3fee2102e2-config\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.764138 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40233f71-8156-4715-adce-ef3fee2102e2-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.764178 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/40233f71-8156-4715-adce-ef3fee2102e2-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.764207 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/40233f71-8156-4715-adce-ef3fee2102e2-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.764250 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.764271 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlw2t\" (UniqueName: \"kubernetes.io/projected/40233f71-8156-4715-adce-ef3fee2102e2-kube-api-access-vlw2t\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.764332 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/40233f71-8156-4715-adce-ef3fee2102e2-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.764780 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/40233f71-8156-4715-adce-ef3fee2102e2-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.765602 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/40233f71-8156-4715-adce-ef3fee2102e2-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.766130 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40233f71-8156-4715-adce-ef3fee2102e2-config\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.767575 4644 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.769771 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/40233f71-8156-4715-adce-ef3fee2102e2-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.770190 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/40233f71-8156-4715-adce-ef3fee2102e2-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.770324 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40233f71-8156-4715-adce-ef3fee2102e2-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.782213 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlw2t\" (UniqueName: \"kubernetes.io/projected/40233f71-8156-4715-adce-ef3fee2102e2-kube-api-access-vlw2t\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.792810 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"40233f71-8156-4715-adce-ef3fee2102e2\") " pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.865703 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7a4d1d4-1933-45f7-a100-52315335b741-config\") pod \"b7a4d1d4-1933-45f7-a100-52315335b741\" (UID: \"b7a4d1d4-1933-45f7-a100-52315335b741\") " Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.865759 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7a4d1d4-1933-45f7-a100-52315335b741-dns-svc\") pod \"b7a4d1d4-1933-45f7-a100-52315335b741\" (UID: \"b7a4d1d4-1933-45f7-a100-52315335b741\") " Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.865804 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-hn6ln\" (UniqueName: \"kubernetes.io/projected/b7a4d1d4-1933-45f7-a100-52315335b741-kube-api-access-hn6ln\") pod \"b7a4d1d4-1933-45f7-a100-52315335b741\" (UID: \"b7a4d1d4-1933-45f7-a100-52315335b741\") " Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.870144 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7a4d1d4-1933-45f7-a100-52315335b741-kube-api-access-hn6ln" (OuterVolumeSpecName: "kube-api-access-hn6ln") pod "b7a4d1d4-1933-45f7-a100-52315335b741" (UID: "b7a4d1d4-1933-45f7-a100-52315335b741"). InnerVolumeSpecName "kube-api-access-hn6ln". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.883865 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7a4d1d4-1933-45f7-a100-52315335b741-config" (OuterVolumeSpecName: "config") pod "b7a4d1d4-1933-45f7-a100-52315335b741" (UID: "b7a4d1d4-1933-45f7-a100-52315335b741"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.884038 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7a4d1d4-1933-45f7-a100-52315335b741-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b7a4d1d4-1933-45f7-a100-52315335b741" (UID: "b7a4d1d4-1933-45f7-a100-52315335b741"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.886726 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-dpzsf" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.932075 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.968053 4644 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7a4d1d4-1933-45f7-a100-52315335b741-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.968091 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hn6ln\" (UniqueName: \"kubernetes.io/projected/b7a4d1d4-1933-45f7-a100-52315335b741-kube-api-access-hn6ln\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:26 crc kubenswrapper[4644]: I1213 07:00:26.968104 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7a4d1d4-1933-45f7-a100-52315335b741-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.457113 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-tfctb" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.457118 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f854695bc-tfctb" event={"ID":"b7a4d1d4-1933-45f7-a100-52315335b741","Type":"ContainerDied","Data":"eca6ebdfd8ed96e87e211c8590fcab9fcd452dc0078c7cbce38f90fa803179d6"} Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.457795 4644 scope.go:117] "RemoveContainer" containerID="1a2ed9d282b350e7e59b6040a83f4ecf2b370d22ca8755def1e3c3a3b822c682" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.462186 4644 generic.go:334] "Generic (PLEG): container finished" podID="a6331da5-ec97-4e23-8a5d-a9977be81c72" containerID="c87413c12414e7a737c8e69c610b761f48e5d3c7d88ec6b6118092959e09b26b" exitCode=0 Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.462342 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" event={"ID":"a6331da5-ec97-4e23-8a5d-a9977be81c72","Type":"ContainerDied","Data":"c87413c12414e7a737c8e69c610b761f48e5d3c7d88ec6b6118092959e09b26b"} Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.462379 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" event={"ID":"a6331da5-ec97-4e23-8a5d-a9977be81c72","Type":"ContainerStarted","Data":"c1567e4bf3fa32b226008aad08431f9ee08b4755e9c01916863331864ee65ffe"} Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.462737 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.470136 4644 generic.go:334] "Generic (PLEG): container finished" podID="710276eb-129b-4cf2-bbc4-2a41e70c5c1a" containerID="5c98714e6c48e46915016628becfe385bd1b537ee77e8c61e90df3fcb9ac9468" exitCode=0 Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.470188 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" event={"ID":"710276eb-129b-4cf2-bbc4-2a41e70c5c1a","Type":"ContainerDied","Data":"5c98714e6c48e46915016628becfe385bd1b537ee77e8c61e90df3fcb9ac9468"} Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.470218 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" event={"ID":"710276eb-129b-4cf2-bbc4-2a41e70c5c1a","Type":"ContainerStarted","Data":"c0bc8df05da38c5395c879d9a8a49be37e250af1091073f8fde65fa7708f5e8e"} Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.470352 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.479494 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" podStartSLOduration=12.736020287 podStartE2EDuration="13.47942083s" podCreationTimestamp="2025-12-13 07:00:14 +0000 UTC" firstStartedPulling="2025-12-13 07:00:25.463557753 +0000 UTC m=+887.678508586" lastFinishedPulling="2025-12-13 07:00:26.206958295 +0000 UTC m=+888.421909129" observedRunningTime="2025-12-13 07:00:27.474801064 +0000 UTC m=+889.689751896" watchObservedRunningTime="2025-12-13 07:00:27.47942083 +0000 UTC m=+889.694371662" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.504418 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" podStartSLOduration=12.791356122 podStartE2EDuration="13.504394136s" 
podCreationTimestamp="2025-12-13 07:00:14 +0000 UTC" firstStartedPulling="2025-12-13 07:00:25.486457409 +0000 UTC m=+887.701408242" lastFinishedPulling="2025-12-13 07:00:26.199495423 +0000 UTC m=+888.414446256" observedRunningTime="2025-12-13 07:00:27.490732208 +0000 UTC m=+889.705683041" watchObservedRunningTime="2025-12-13 07:00:27.504394136 +0000 UTC m=+889.719344969" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.534917 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 13 07:00:27 crc kubenswrapper[4644]: E1213 07:00:27.535348 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7a4d1d4-1933-45f7-a100-52315335b741" containerName="init" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.535507 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7a4d1d4-1933-45f7-a100-52315335b741" containerName="init" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.535882 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7a4d1d4-1933-45f7-a100-52315335b741" containerName="init" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.537155 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.540156 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-m2dpn" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.541116 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.545374 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.546037 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.557524 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-tfctb"] Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.562545 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-tfctb"] Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.608957 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.688390 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/829c5d95-3315-442d-8ec5-ed6a67497802-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.688772 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/829c5d95-3315-442d-8ec5-ed6a67497802-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.689024 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/829c5d95-3315-442d-8ec5-ed6a67497802-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " 
pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.689151 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/829c5d95-3315-442d-8ec5-ed6a67497802-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.689219 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/829c5d95-3315-442d-8ec5-ed6a67497802-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.689286 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.689388 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rzc6\" (UniqueName: \"kubernetes.io/projected/829c5d95-3315-442d-8ec5-ed6a67497802-kube-api-access-4rzc6\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.689412 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/829c5d95-3315-442d-8ec5-ed6a67497802-config\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.791594 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.791718 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rzc6\" (UniqueName: \"kubernetes.io/projected/829c5d95-3315-442d-8ec5-ed6a67497802-kube-api-access-4rzc6\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.791744 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/829c5d95-3315-442d-8ec5-ed6a67497802-config\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.791819 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/829c5d95-3315-442d-8ec5-ed6a67497802-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.791842 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/829c5d95-3315-442d-8ec5-ed6a67497802-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.792083 4644 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.798138 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/829c5d95-3315-442d-8ec5-ed6a67497802-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.799003 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/829c5d95-3315-442d-8ec5-ed6a67497802-config\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.800018 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/829c5d95-3315-442d-8ec5-ed6a67497802-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.800168 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/829c5d95-3315-442d-8ec5-ed6a67497802-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.800195 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/829c5d95-3315-442d-8ec5-ed6a67497802-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.800400 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/829c5d95-3315-442d-8ec5-ed6a67497802-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.807008 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/829c5d95-3315-442d-8ec5-ed6a67497802-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.807064 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/829c5d95-3315-442d-8ec5-ed6a67497802-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.808258 4644 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/829c5d95-3315-442d-8ec5-ed6a67497802-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.824666 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rzc6\" (UniqueName: \"kubernetes.io/projected/829c5d95-3315-442d-8ec5-ed6a67497802-kube-api-access-4rzc6\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.827789 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"829c5d95-3315-442d-8ec5-ed6a67497802\") " pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:27 crc kubenswrapper[4644]: I1213 07:00:27.856584 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:28 crc kubenswrapper[4644]: I1213 07:00:28.067433 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-dpzsf"] Dec 13 07:00:28 crc kubenswrapper[4644]: I1213 07:00:28.112874 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 13 07:00:28 crc kubenswrapper[4644]: W1213 07:00:28.209022 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1cb5b56c_b83a_4d43_b1fa_2a88580eceea.slice/crio-bb22a54a78679bac1f8dad51bfc1f0d6f5a0ca23188f7ea64e7839b43b4b1b6b WatchSource:0}: Error finding container bb22a54a78679bac1f8dad51bfc1f0d6f5a0ca23188f7ea64e7839b43b4b1b6b: Status 404 returned error can't find the container with id bb22a54a78679bac1f8dad51bfc1f0d6f5a0ca23188f7ea64e7839b43b4b1b6b Dec 13 07:00:28 crc kubenswrapper[4644]: W1213 07:00:28.210260 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod40233f71_8156_4715_adce_ef3fee2102e2.slice/crio-18d6187d35e07cc0cf5183ccdde0071f1c59265ef5c3059fdb09108652ad19ee WatchSource:0}: Error finding container 18d6187d35e07cc0cf5183ccdde0071f1c59265ef5c3059fdb09108652ad19ee: Status 404 returned error can't find the container with id 18d6187d35e07cc0cf5183ccdde0071f1c59265ef5c3059fdb09108652ad19ee Dec 13 07:00:28 crc kubenswrapper[4644]: I1213 07:00:28.404210 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7a4d1d4-1933-45f7-a100-52315335b741" path="/var/lib/kubelet/pods/b7a4d1d4-1933-45f7-a100-52315335b741/volumes" Dec 13 07:00:28 crc kubenswrapper[4644]: I1213 07:00:28.491723 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-dpzsf" event={"ID":"1cb5b56c-b83a-4d43-b1fa-2a88580eceea","Type":"ContainerStarted","Data":"bb22a54a78679bac1f8dad51bfc1f0d6f5a0ca23188f7ea64e7839b43b4b1b6b"} Dec 13 07:00:28 crc kubenswrapper[4644]: I1213 07:00:28.496431 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84bb9d8bd9-wdkj8" podUID="1bcb1452-900e-4d5f-bd69-65d50257cd6c" containerName="init" containerID="cri-o://f6d976f361fc4c284c00ef268b307e238df195c12bc3f699a3dd0d83e7a6d2b8" gracePeriod=10 Dec 13 07:00:28 crc kubenswrapper[4644]: I1213 
07:00:28.496862 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9d8bd9-wdkj8" event={"ID":"1bcb1452-900e-4d5f-bd69-65d50257cd6c","Type":"ContainerStarted","Data":"f6d976f361fc4c284c00ef268b307e238df195c12bc3f699a3dd0d83e7a6d2b8"} Dec 13 07:00:28 crc kubenswrapper[4644]: I1213 07:00:28.502708 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"40233f71-8156-4715-adce-ef3fee2102e2","Type":"ContainerStarted","Data":"18d6187d35e07cc0cf5183ccdde0071f1c59265ef5c3059fdb09108652ad19ee"} Dec 13 07:00:28 crc kubenswrapper[4644]: I1213 07:00:28.819557 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 13 07:00:29 crc kubenswrapper[4644]: I1213 07:00:29.512411 4644 generic.go:334] "Generic (PLEG): container finished" podID="1bcb1452-900e-4d5f-bd69-65d50257cd6c" containerID="f6d976f361fc4c284c00ef268b307e238df195c12bc3f699a3dd0d83e7a6d2b8" exitCode=0 Dec 13 07:00:29 crc kubenswrapper[4644]: I1213 07:00:29.512482 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9d8bd9-wdkj8" event={"ID":"1bcb1452-900e-4d5f-bd69-65d50257cd6c","Type":"ContainerDied","Data":"f6d976f361fc4c284c00ef268b307e238df195c12bc3f699a3dd0d83e7a6d2b8"} Dec 13 07:00:29 crc kubenswrapper[4644]: W1213 07:00:29.829148 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod829c5d95_3315_442d_8ec5_ed6a67497802.slice/crio-e46055094c4d0f7e6c3b819c3f159f08aecbda09019491ba0872c8df1ee96fc7 WatchSource:0}: Error finding container e46055094c4d0f7e6c3b819c3f159f08aecbda09019491ba0872c8df1ee96fc7: Status 404 returned error can't find the container with id e46055094c4d0f7e6c3b819c3f159f08aecbda09019491ba0872c8df1ee96fc7 Dec 13 07:00:30 crc kubenswrapper[4644]: I1213 07:00:30.521472 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"829c5d95-3315-442d-8ec5-ed6a67497802","Type":"ContainerStarted","Data":"e46055094c4d0f7e6c3b819c3f159f08aecbda09019491ba0872c8df1ee96fc7"} Dec 13 07:00:32 crc kubenswrapper[4644]: I1213 07:00:32.910460 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-wdkj8" Dec 13 07:00:32 crc kubenswrapper[4644]: I1213 07:00:32.996479 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bcb1452-900e-4d5f-bd69-65d50257cd6c-config\") pod \"1bcb1452-900e-4d5f-bd69-65d50257cd6c\" (UID: \"1bcb1452-900e-4d5f-bd69-65d50257cd6c\") " Dec 13 07:00:32 crc kubenswrapper[4644]: I1213 07:00:32.996644 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jlknb\" (UniqueName: \"kubernetes.io/projected/1bcb1452-900e-4d5f-bd69-65d50257cd6c-kube-api-access-jlknb\") pod \"1bcb1452-900e-4d5f-bd69-65d50257cd6c\" (UID: \"1bcb1452-900e-4d5f-bd69-65d50257cd6c\") " Dec 13 07:00:33 crc kubenswrapper[4644]: I1213 07:00:33.002357 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bcb1452-900e-4d5f-bd69-65d50257cd6c-kube-api-access-jlknb" (OuterVolumeSpecName: "kube-api-access-jlknb") pod "1bcb1452-900e-4d5f-bd69-65d50257cd6c" (UID: "1bcb1452-900e-4d5f-bd69-65d50257cd6c"). InnerVolumeSpecName "kube-api-access-jlknb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:00:33 crc kubenswrapper[4644]: I1213 07:00:33.017948 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bcb1452-900e-4d5f-bd69-65d50257cd6c-config" (OuterVolumeSpecName: "config") pod "1bcb1452-900e-4d5f-bd69-65d50257cd6c" (UID: "1bcb1452-900e-4d5f-bd69-65d50257cd6c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:00:33 crc kubenswrapper[4644]: I1213 07:00:33.099566 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jlknb\" (UniqueName: \"kubernetes.io/projected/1bcb1452-900e-4d5f-bd69-65d50257cd6c-kube-api-access-jlknb\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:33 crc kubenswrapper[4644]: I1213 07:00:33.099603 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bcb1452-900e-4d5f-bd69-65d50257cd6c-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:33 crc kubenswrapper[4644]: I1213 07:00:33.541977 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9d8bd9-wdkj8" event={"ID":"1bcb1452-900e-4d5f-bd69-65d50257cd6c","Type":"ContainerDied","Data":"e0b7e2849697ede4d5b76f1b86e8aa20b871ba4d3636c470ab9e56e8463355d8"} Dec 13 07:00:33 crc kubenswrapper[4644]: I1213 07:00:33.542019 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-wdkj8" Dec 13 07:00:33 crc kubenswrapper[4644]: I1213 07:00:33.542037 4644 scope.go:117] "RemoveContainer" containerID="f6d976f361fc4c284c00ef268b307e238df195c12bc3f699a3dd0d83e7a6d2b8" Dec 13 07:00:33 crc kubenswrapper[4644]: I1213 07:00:33.587757 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-wdkj8"] Dec 13 07:00:33 crc kubenswrapper[4644]: I1213 07:00:33.593005 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-wdkj8"] Dec 13 07:00:34 crc kubenswrapper[4644]: I1213 07:00:34.397941 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bcb1452-900e-4d5f-bd69-65d50257cd6c" path="/var/lib/kubelet/pods/1bcb1452-900e-4d5f-bd69-65d50257cd6c/volumes" Dec 13 07:00:34 crc kubenswrapper[4644]: I1213 07:00:34.462655 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" Dec 13 07:00:34 crc kubenswrapper[4644]: I1213 07:00:34.788774 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" Dec 13 07:00:34 crc kubenswrapper[4644]: I1213 07:00:34.855281 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-45ghl"] Dec 13 07:00:34 crc kubenswrapper[4644]: I1213 07:00:34.855514 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" podUID="710276eb-129b-4cf2-bbc4-2a41e70c5c1a" containerName="dnsmasq-dns" containerID="cri-o://c0bc8df05da38c5395c879d9a8a49be37e250af1091073f8fde65fa7708f5e8e" gracePeriod=10 Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.350273 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.456460 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-dns-svc\") pod \"710276eb-129b-4cf2-bbc4-2a41e70c5c1a\" (UID: \"710276eb-129b-4cf2-bbc4-2a41e70c5c1a\") " Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.456791 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q5chk\" (UniqueName: \"kubernetes.io/projected/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-kube-api-access-q5chk\") pod \"710276eb-129b-4cf2-bbc4-2a41e70c5c1a\" (UID: \"710276eb-129b-4cf2-bbc4-2a41e70c5c1a\") " Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.456823 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-config\") pod \"710276eb-129b-4cf2-bbc4-2a41e70c5c1a\" (UID: \"710276eb-129b-4cf2-bbc4-2a41e70c5c1a\") " Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.539540 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-kube-api-access-q5chk" (OuterVolumeSpecName: "kube-api-access-q5chk") pod "710276eb-129b-4cf2-bbc4-2a41e70c5c1a" (UID: "710276eb-129b-4cf2-bbc4-2a41e70c5c1a"). InnerVolumeSpecName "kube-api-access-q5chk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.559395 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q5chk\" (UniqueName: \"kubernetes.io/projected/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-kube-api-access-q5chk\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.562413 4644 generic.go:334] "Generic (PLEG): container finished" podID="710276eb-129b-4cf2-bbc4-2a41e70c5c1a" containerID="c0bc8df05da38c5395c879d9a8a49be37e250af1091073f8fde65fa7708f5e8e" exitCode=0 Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.562482 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.562509 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" event={"ID":"710276eb-129b-4cf2-bbc4-2a41e70c5c1a","Type":"ContainerDied","Data":"c0bc8df05da38c5395c879d9a8a49be37e250af1091073f8fde65fa7708f5e8e"} Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.562831 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-45ghl" event={"ID":"710276eb-129b-4cf2-bbc4-2a41e70c5c1a","Type":"ContainerDied","Data":"6829a9dc7bc7d3a63469f67aa920470995a2388f0b749712055da31e5c02aa13"} Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.562884 4644 scope.go:117] "RemoveContainer" containerID="c0bc8df05da38c5395c879d9a8a49be37e250af1091073f8fde65fa7708f5e8e" Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.564538 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8a194627-4be2-4f29-ab01-c84d89070739","Type":"ContainerStarted","Data":"8377df6dc118b9a948ec7aa88749afcface5a72a5a091d7b200369c4d2851427"} Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.567702 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-txrxh" event={"ID":"6e2e749f-da1d-4692-9479-275cd00bc4b0","Type":"ContainerStarted","Data":"b8c8192ed2d9b30d5c742994ec6d583c2d0255d0057334521a5dc841cdc6b606"} Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.568082 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-txrxh" Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.570669 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"829c5d95-3315-442d-8ec5-ed6a67497802","Type":"ContainerStarted","Data":"4a3a71a81139747d6f24744dab7a5df6f8c0e00c39e26752fe2aa6cf7b370521"} Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.572829 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"103d51fb-7ed3-487e-819f-bbcdfb2dea86","Type":"ContainerStarted","Data":"13ae653ef4b8fcd48fc3186e406a9f30456030af7700247e8ae17bdececa8cbc"} Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.572965 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.579471 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-cb7bh" event={"ID":"0d73b937-2604-4e87-867b-acdc34d21e5e","Type":"ContainerStarted","Data":"5d89e0b619426ef1b0952bd2bb4fce70e4cc83cbc0dd479921b3d08ce582ca94"} Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.579631 4644 scope.go:117] "RemoveContainer" containerID="5c98714e6c48e46915016628becfe385bd1b537ee77e8c61e90df3fcb9ac9468" Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.581727 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"628995d0-5034-4f64-8c48-50eee052e5db","Type":"ContainerStarted","Data":"2e205ab8eedc8db08ffe602a78832cb3ea0bdf11e2b521d1e5d23ec822c4bfcd"} Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.582578 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.592867 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" 
event={"ID":"e8e96f9f-4c53-4df2-b818-c3341709594f","Type":"ContainerStarted","Data":"46d94f1555c2c2fed785348fad8c32206b460a388af7b49221dc20d2f0f345ef"} Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.594942 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-dpzsf" event={"ID":"1cb5b56c-b83a-4d43-b1fa-2a88580eceea","Type":"ContainerStarted","Data":"f830b343dacc87c4eca6ee2c0ec6d04f31f253b4c7dd3b8af0ae04114a017c54"} Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.608830 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=8.757340684 podStartE2EDuration="17.608808433s" podCreationTimestamp="2025-12-13 07:00:18 +0000 UTC" firstStartedPulling="2025-12-13 07:00:25.435667111 +0000 UTC m=+887.650617944" lastFinishedPulling="2025-12-13 07:00:34.287134859 +0000 UTC m=+896.502085693" observedRunningTime="2025-12-13 07:00:35.605937776 +0000 UTC m=+897.820888609" watchObservedRunningTime="2025-12-13 07:00:35.608808433 +0000 UTC m=+897.823759266" Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.628208 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-txrxh" podStartSLOduration=4.441928553 podStartE2EDuration="12.628190015s" podCreationTimestamp="2025-12-13 07:00:23 +0000 UTC" firstStartedPulling="2025-12-13 07:00:26.103414146 +0000 UTC m=+888.318364978" lastFinishedPulling="2025-12-13 07:00:34.289675607 +0000 UTC m=+896.504626440" observedRunningTime="2025-12-13 07:00:35.620428001 +0000 UTC m=+897.835378824" watchObservedRunningTime="2025-12-13 07:00:35.628190015 +0000 UTC m=+897.843140848" Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.651978 4644 scope.go:117] "RemoveContainer" containerID="c0bc8df05da38c5395c879d9a8a49be37e250af1091073f8fde65fa7708f5e8e" Dec 13 07:00:35 crc kubenswrapper[4644]: E1213 07:00:35.652981 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0bc8df05da38c5395c879d9a8a49be37e250af1091073f8fde65fa7708f5e8e\": container with ID starting with c0bc8df05da38c5395c879d9a8a49be37e250af1091073f8fde65fa7708f5e8e not found: ID does not exist" containerID="c0bc8df05da38c5395c879d9a8a49be37e250af1091073f8fde65fa7708f5e8e" Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.653022 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0bc8df05da38c5395c879d9a8a49be37e250af1091073f8fde65fa7708f5e8e"} err="failed to get container status \"c0bc8df05da38c5395c879d9a8a49be37e250af1091073f8fde65fa7708f5e8e\": rpc error: code = NotFound desc = could not find container \"c0bc8df05da38c5395c879d9a8a49be37e250af1091073f8fde65fa7708f5e8e\": container with ID starting with c0bc8df05da38c5395c879d9a8a49be37e250af1091073f8fde65fa7708f5e8e not found: ID does not exist" Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.653050 4644 scope.go:117] "RemoveContainer" containerID="5c98714e6c48e46915016628becfe385bd1b537ee77e8c61e90df3fcb9ac9468" Dec 13 07:00:35 crc kubenswrapper[4644]: E1213 07:00:35.653766 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c98714e6c48e46915016628becfe385bd1b537ee77e8c61e90df3fcb9ac9468\": container with ID starting with 5c98714e6c48e46915016628becfe385bd1b537ee77e8c61e90df3fcb9ac9468 not found: ID does not exist" 
containerID="5c98714e6c48e46915016628becfe385bd1b537ee77e8c61e90df3fcb9ac9468" Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.653805 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c98714e6c48e46915016628becfe385bd1b537ee77e8c61e90df3fcb9ac9468"} err="failed to get container status \"5c98714e6c48e46915016628becfe385bd1b537ee77e8c61e90df3fcb9ac9468\": rpc error: code = NotFound desc = could not find container \"5c98714e6c48e46915016628becfe385bd1b537ee77e8c61e90df3fcb9ac9468\": container with ID starting with 5c98714e6c48e46915016628becfe385bd1b537ee77e8c61e90df3fcb9ac9468 not found: ID does not exist" Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.662344 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=6.778726745 podStartE2EDuration="15.6623208s" podCreationTimestamp="2025-12-13 07:00:20 +0000 UTC" firstStartedPulling="2025-12-13 07:00:26.076790377 +0000 UTC m=+888.291741199" lastFinishedPulling="2025-12-13 07:00:34.960384421 +0000 UTC m=+897.175335254" observedRunningTime="2025-12-13 07:00:35.657418662 +0000 UTC m=+897.872369496" watchObservedRunningTime="2025-12-13 07:00:35.6623208 +0000 UTC m=+897.877271634" Dec 13 07:00:35 crc kubenswrapper[4644]: I1213 07:00:35.677502 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-dpzsf" podStartSLOduration=2.881721623 podStartE2EDuration="9.677481687s" podCreationTimestamp="2025-12-13 07:00:26 +0000 UTC" firstStartedPulling="2025-12-13 07:00:28.219157153 +0000 UTC m=+890.434107986" lastFinishedPulling="2025-12-13 07:00:35.014917216 +0000 UTC m=+897.229868050" observedRunningTime="2025-12-13 07:00:35.67301098 +0000 UTC m=+897.887961813" watchObservedRunningTime="2025-12-13 07:00:35.677481687 +0000 UTC m=+897.892432520" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.002568 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7878659675-rdwwg"] Dec 13 07:00:36 crc kubenswrapper[4644]: E1213 07:00:36.003180 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="710276eb-129b-4cf2-bbc4-2a41e70c5c1a" containerName="init" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.003200 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="710276eb-129b-4cf2-bbc4-2a41e70c5c1a" containerName="init" Dec 13 07:00:36 crc kubenswrapper[4644]: E1213 07:00:36.003223 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="710276eb-129b-4cf2-bbc4-2a41e70c5c1a" containerName="dnsmasq-dns" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.003230 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="710276eb-129b-4cf2-bbc4-2a41e70c5c1a" containerName="dnsmasq-dns" Dec 13 07:00:36 crc kubenswrapper[4644]: E1213 07:00:36.003248 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bcb1452-900e-4d5f-bd69-65d50257cd6c" containerName="init" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.003256 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bcb1452-900e-4d5f-bd69-65d50257cd6c" containerName="init" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.003413 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bcb1452-900e-4d5f-bd69-65d50257cd6c" containerName="init" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.003428 4644 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="710276eb-129b-4cf2-bbc4-2a41e70c5c1a" containerName="dnsmasq-dns" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.004257 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7878659675-rdwwg" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.008846 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.019333 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7878659675-rdwwg"] Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.170279 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-dns-svc\") pod \"dnsmasq-dns-7878659675-rdwwg\" (UID: \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\") " pod="openstack/dnsmasq-dns-7878659675-rdwwg" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.170344 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p42p7\" (UniqueName: \"kubernetes.io/projected/47f5ebb6-8937-441f-9c9f-a1e31c401eed-kube-api-access-p42p7\") pod \"dnsmasq-dns-7878659675-rdwwg\" (UID: \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\") " pod="openstack/dnsmasq-dns-7878659675-rdwwg" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.170376 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-ovsdbserver-nb\") pod \"dnsmasq-dns-7878659675-rdwwg\" (UID: \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\") " pod="openstack/dnsmasq-dns-7878659675-rdwwg" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.170507 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-config\") pod \"dnsmasq-dns-7878659675-rdwwg\" (UID: \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\") " pod="openstack/dnsmasq-dns-7878659675-rdwwg" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.183666 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "710276eb-129b-4cf2-bbc4-2a41e70c5c1a" (UID: "710276eb-129b-4cf2-bbc4-2a41e70c5c1a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.188137 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-config" (OuterVolumeSpecName: "config") pod "710276eb-129b-4cf2-bbc4-2a41e70c5c1a" (UID: "710276eb-129b-4cf2-bbc4-2a41e70c5c1a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.193117 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7878659675-rdwwg"] Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.218494 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-jtk58"] Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.219927 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.222653 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.229970 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-jtk58"] Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.272524 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-config\") pod \"dnsmasq-dns-7878659675-rdwwg\" (UID: \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\") " pod="openstack/dnsmasq-dns-7878659675-rdwwg" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.272757 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-dns-svc\") pod \"dnsmasq-dns-7878659675-rdwwg\" (UID: \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\") " pod="openstack/dnsmasq-dns-7878659675-rdwwg" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.272800 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p42p7\" (UniqueName: \"kubernetes.io/projected/47f5ebb6-8937-441f-9c9f-a1e31c401eed-kube-api-access-p42p7\") pod \"dnsmasq-dns-7878659675-rdwwg\" (UID: \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\") " pod="openstack/dnsmasq-dns-7878659675-rdwwg" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.272825 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-ovsdbserver-nb\") pod \"dnsmasq-dns-7878659675-rdwwg\" (UID: \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\") " pod="openstack/dnsmasq-dns-7878659675-rdwwg" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.273201 4644 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.273218 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/710276eb-129b-4cf2-bbc4-2a41e70c5c1a-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.273985 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-config\") pod \"dnsmasq-dns-7878659675-rdwwg\" (UID: \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\") " pod="openstack/dnsmasq-dns-7878659675-rdwwg" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.274038 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-dns-svc\") pod \"dnsmasq-dns-7878659675-rdwwg\" (UID: \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\") " pod="openstack/dnsmasq-dns-7878659675-rdwwg" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.274099 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-ovsdbserver-nb\") pod \"dnsmasq-dns-7878659675-rdwwg\" (UID: \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\") " pod="openstack/dnsmasq-dns-7878659675-rdwwg" 
Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.287088 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p42p7\" (UniqueName: \"kubernetes.io/projected/47f5ebb6-8937-441f-9c9f-a1e31c401eed-kube-api-access-p42p7\") pod \"dnsmasq-dns-7878659675-rdwwg\" (UID: \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\") " pod="openstack/dnsmasq-dns-7878659675-rdwwg" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.327626 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7878659675-rdwwg" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.375074 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-jtk58\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.375176 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-jtk58\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.375436 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-dns-svc\") pod \"dnsmasq-dns-586b989cdc-jtk58\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.375499 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hf8w\" (UniqueName: \"kubernetes.io/projected/b52e9cb2-f628-4a79-a112-4c567318e8d3-kube-api-access-6hf8w\") pod \"dnsmasq-dns-586b989cdc-jtk58\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.375561 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-config\") pod \"dnsmasq-dns-586b989cdc-jtk58\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.477142 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-jtk58\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.477515 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-jtk58\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.477757 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-dns-svc\") pod \"dnsmasq-dns-586b989cdc-jtk58\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.477789 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hf8w\" (UniqueName: \"kubernetes.io/projected/b52e9cb2-f628-4a79-a112-4c567318e8d3-kube-api-access-6hf8w\") pod \"dnsmasq-dns-586b989cdc-jtk58\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.477847 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-config\") pod \"dnsmasq-dns-586b989cdc-jtk58\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.478318 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-jtk58\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.478728 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-dns-svc\") pod \"dnsmasq-dns-586b989cdc-jtk58\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.479563 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-config\") pod \"dnsmasq-dns-586b989cdc-jtk58\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.479930 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-jtk58\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.482367 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-45ghl"] Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.488470 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-45ghl"] Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.498502 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hf8w\" (UniqueName: \"kubernetes.io/projected/b52e9cb2-f628-4a79-a112-4c567318e8d3-kube-api-access-6hf8w\") pod \"dnsmasq-dns-586b989cdc-jtk58\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.604591 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"40233f71-8156-4715-adce-ef3fee2102e2","Type":"ContainerStarted","Data":"8511ea058f4718449cd382a47652cbd2ec9f5aee535284959e2e4b53329372ac"} Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 
07:00:36.604634 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"40233f71-8156-4715-adce-ef3fee2102e2","Type":"ContainerStarted","Data":"a013e624f4beb7ad5ca20e66802a3d221045017053509af65dba6b06ea4aa48d"} Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.607648 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0dd20500-a2dd-4608-a3c8-7d714ffb09c4","Type":"ContainerStarted","Data":"7fb838eb12d8d62aadffa1f046880d9513f421e1f29a5d58016251c007b5e95d"} Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.609285 4644 generic.go:334] "Generic (PLEG): container finished" podID="0d73b937-2604-4e87-867b-acdc34d21e5e" containerID="5d89e0b619426ef1b0952bd2bb4fce70e4cc83cbc0dd479921b3d08ce582ca94" exitCode=0 Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.609328 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-cb7bh" event={"ID":"0d73b937-2604-4e87-867b-acdc34d21e5e","Type":"ContainerDied","Data":"5d89e0b619426ef1b0952bd2bb4fce70e4cc83cbc0dd479921b3d08ce582ca94"} Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.611117 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"829c5d95-3315-442d-8ec5-ed6a67497802","Type":"ContainerStarted","Data":"af3de5c4a2c86c92a636f1058cd99b7f2038463839e5232f48fa78554de303d1"} Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.614160 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"706c4700-9d13-4bac-b7ac-0c19c09cc7e7","Type":"ContainerStarted","Data":"70a5774436468fb133077a60d251233d249cc290e92dd10a84f76c87a4876089"} Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.634160 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.646014 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=4.975381032 podStartE2EDuration="11.645992174s" podCreationTimestamp="2025-12-13 07:00:25 +0000 UTC" firstStartedPulling="2025-12-13 07:00:28.221004425 +0000 UTC m=+890.435955259" lastFinishedPulling="2025-12-13 07:00:34.891615568 +0000 UTC m=+897.106566401" observedRunningTime="2025-12-13 07:00:36.626104028 +0000 UTC m=+898.841054861" watchObservedRunningTime="2025-12-13 07:00:36.645992174 +0000 UTC m=+898.860943007" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.681768 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=5.620733344 podStartE2EDuration="10.681749807s" podCreationTimestamp="2025-12-13 07:00:26 +0000 UTC" firstStartedPulling="2025-12-13 07:00:29.831097131 +0000 UTC m=+892.046047964" lastFinishedPulling="2025-12-13 07:00:34.892113594 +0000 UTC m=+897.107064427" observedRunningTime="2025-12-13 07:00:36.680479158 +0000 UTC m=+898.895429991" watchObservedRunningTime="2025-12-13 07:00:36.681749807 +0000 UTC m=+898.896700640" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.724764 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7878659675-rdwwg"] Dec 13 07:00:36 crc kubenswrapper[4644]: W1213 07:00:36.729481 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47f5ebb6_8937_441f_9c9f_a1e31c401eed.slice/crio-b8fc48432f72b692739947981613bbb69944b32e6c437e9a01d15154f417b1b5 WatchSource:0}: Error finding container b8fc48432f72b692739947981613bbb69944b32e6c437e9a01d15154f417b1b5: Status 404 returned error can't find the container with id b8fc48432f72b692739947981613bbb69944b32e6c437e9a01d15154f417b1b5 Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.857585 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:36 crc kubenswrapper[4644]: I1213 07:00:36.934566 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.064464 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-jtk58"] Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.623066 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-cb7bh" event={"ID":"0d73b937-2604-4e87-867b-acdc34d21e5e","Type":"ContainerStarted","Data":"04e4b81536e0334e8824dcf55bd4ce3d71ae352b2152f0ee54e7869cdfbb74e3"} Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.623298 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.623311 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-cb7bh" event={"ID":"0d73b937-2604-4e87-867b-acdc34d21e5e","Type":"ContainerStarted","Data":"66e23977ff4697db822b06129d645ad0f1ea4013ac7161431f3ee53067d5b2b6"} Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.625572 4644 generic.go:334] "Generic (PLEG): container finished" podID="b52e9cb2-f628-4a79-a112-4c567318e8d3" containerID="6e0d283977ffe4a42c805a7d8d2a6206ca691c76c1a172a21daffc3fd61a4f7b" exitCode=0 Dec 
13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.625652 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-jtk58" event={"ID":"b52e9cb2-f628-4a79-a112-4c567318e8d3","Type":"ContainerDied","Data":"6e0d283977ffe4a42c805a7d8d2a6206ca691c76c1a172a21daffc3fd61a4f7b"} Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.625677 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-jtk58" event={"ID":"b52e9cb2-f628-4a79-a112-4c567318e8d3","Type":"ContainerStarted","Data":"66fd34dd4d8a13957072d640dd724b3ebdcc55c10619c9902ac0c1e9dff8969d"} Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.627664 4644 generic.go:334] "Generic (PLEG): container finished" podID="47f5ebb6-8937-441f-9c9f-a1e31c401eed" containerID="f78f340e866c47b63dbaf3e9eb8a3c0fdf2119d5d4f6ca8ed49983e13c0b9cce" exitCode=0 Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.627818 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7878659675-rdwwg" event={"ID":"47f5ebb6-8937-441f-9c9f-a1e31c401eed","Type":"ContainerDied","Data":"f78f340e866c47b63dbaf3e9eb8a3c0fdf2119d5d4f6ca8ed49983e13c0b9cce"} Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.627883 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7878659675-rdwwg" event={"ID":"47f5ebb6-8937-441f-9c9f-a1e31c401eed","Type":"ContainerStarted","Data":"b8fc48432f72b692739947981613bbb69944b32e6c437e9a01d15154f417b1b5"} Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.644109 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-cb7bh" podStartSLOduration=7.512199117 podStartE2EDuration="14.643424701s" podCreationTimestamp="2025-12-13 07:00:23 +0000 UTC" firstStartedPulling="2025-12-13 07:00:26.204568682 +0000 UTC m=+888.419519515" lastFinishedPulling="2025-12-13 07:00:33.335794267 +0000 UTC m=+895.550745099" observedRunningTime="2025-12-13 07:00:37.640753809 +0000 UTC m=+899.855704642" watchObservedRunningTime="2025-12-13 07:00:37.643424701 +0000 UTC m=+899.858375525" Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.841901 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7878659675-rdwwg" Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.857293 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.912200 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-dns-svc\") pod \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\" (UID: \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\") " Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.912327 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p42p7\" (UniqueName: \"kubernetes.io/projected/47f5ebb6-8937-441f-9c9f-a1e31c401eed-kube-api-access-p42p7\") pod \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\" (UID: \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\") " Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.912410 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-ovsdbserver-nb\") pod \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\" (UID: \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\") " Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.912454 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-config\") pod \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\" (UID: \"47f5ebb6-8937-441f-9c9f-a1e31c401eed\") " Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.916913 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47f5ebb6-8937-441f-9c9f-a1e31c401eed-kube-api-access-p42p7" (OuterVolumeSpecName: "kube-api-access-p42p7") pod "47f5ebb6-8937-441f-9c9f-a1e31c401eed" (UID: "47f5ebb6-8937-441f-9c9f-a1e31c401eed"). InnerVolumeSpecName "kube-api-access-p42p7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.927977 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "47f5ebb6-8937-441f-9c9f-a1e31c401eed" (UID: "47f5ebb6-8937-441f-9c9f-a1e31c401eed"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.928251 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "47f5ebb6-8937-441f-9c9f-a1e31c401eed" (UID: "47f5ebb6-8937-441f-9c9f-a1e31c401eed"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:00:37 crc kubenswrapper[4644]: I1213 07:00:37.928801 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-config" (OuterVolumeSpecName: "config") pod "47f5ebb6-8937-441f-9c9f-a1e31c401eed" (UID: "47f5ebb6-8937-441f-9c9f-a1e31c401eed"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:00:38 crc kubenswrapper[4644]: I1213 07:00:38.013697 4644 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:38 crc kubenswrapper[4644]: I1213 07:00:38.013726 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p42p7\" (UniqueName: \"kubernetes.io/projected/47f5ebb6-8937-441f-9c9f-a1e31c401eed-kube-api-access-p42p7\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:38 crc kubenswrapper[4644]: I1213 07:00:38.013740 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:38 crc kubenswrapper[4644]: I1213 07:00:38.013749 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47f5ebb6-8937-441f-9c9f-a1e31c401eed-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:38 crc kubenswrapper[4644]: I1213 07:00:38.397629 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="710276eb-129b-4cf2-bbc4-2a41e70c5c1a" path="/var/lib/kubelet/pods/710276eb-129b-4cf2-bbc4-2a41e70c5c1a/volumes" Dec 13 07:00:38 crc kubenswrapper[4644]: I1213 07:00:38.646111 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-jtk58" event={"ID":"b52e9cb2-f628-4a79-a112-4c567318e8d3","Type":"ContainerStarted","Data":"d850d996273d40330b3a0f39a43b40aa30be120a0e88e23e5fa7a7ac24aac8ef"} Dec 13 07:00:38 crc kubenswrapper[4644]: I1213 07:00:38.646240 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:38 crc kubenswrapper[4644]: I1213 07:00:38.648028 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7878659675-rdwwg" Dec 13 07:00:38 crc kubenswrapper[4644]: I1213 07:00:38.648095 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7878659675-rdwwg" event={"ID":"47f5ebb6-8937-441f-9c9f-a1e31c401eed","Type":"ContainerDied","Data":"b8fc48432f72b692739947981613bbb69944b32e6c437e9a01d15154f417b1b5"} Dec 13 07:00:38 crc kubenswrapper[4644]: I1213 07:00:38.648132 4644 scope.go:117] "RemoveContainer" containerID="f78f340e866c47b63dbaf3e9eb8a3c0fdf2119d5d4f6ca8ed49983e13c0b9cce" Dec 13 07:00:38 crc kubenswrapper[4644]: I1213 07:00:38.649113 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:00:38 crc kubenswrapper[4644]: I1213 07:00:38.663184 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-586b989cdc-jtk58" podStartSLOduration=2.6631675379999997 podStartE2EDuration="2.663167538s" podCreationTimestamp="2025-12-13 07:00:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:00:38.660963614 +0000 UTC m=+900.875914437" watchObservedRunningTime="2025-12-13 07:00:38.663167538 +0000 UTC m=+900.878118372" Dec 13 07:00:38 crc kubenswrapper[4644]: I1213 07:00:38.692424 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7878659675-rdwwg"] Dec 13 07:00:38 crc kubenswrapper[4644]: I1213 07:00:38.697238 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7878659675-rdwwg"] Dec 13 07:00:38 crc kubenswrapper[4644]: I1213 07:00:38.932255 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:38 crc kubenswrapper[4644]: I1213 07:00:38.958846 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:39 crc kubenswrapper[4644]: I1213 07:00:39.654817 4644 generic.go:334] "Generic (PLEG): container finished" podID="8a194627-4be2-4f29-ab01-c84d89070739" containerID="8377df6dc118b9a948ec7aa88749afcface5a72a5a091d7b200369c4d2851427" exitCode=0 Dec 13 07:00:39 crc kubenswrapper[4644]: I1213 07:00:39.654906 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8a194627-4be2-4f29-ab01-c84d89070739","Type":"ContainerDied","Data":"8377df6dc118b9a948ec7aa88749afcface5a72a5a091d7b200369c4d2851427"} Dec 13 07:00:39 crc kubenswrapper[4644]: I1213 07:00:39.656367 4644 generic.go:334] "Generic (PLEG): container finished" podID="e8e96f9f-4c53-4df2-b818-c3341709594f" containerID="46d94f1555c2c2fed785348fad8c32206b460a388af7b49221dc20d2f0f345ef" exitCode=0 Dec 13 07:00:39 crc kubenswrapper[4644]: I1213 07:00:39.656510 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e8e96f9f-4c53-4df2-b818-c3341709594f","Type":"ContainerDied","Data":"46d94f1555c2c2fed785348fad8c32206b460a388af7b49221dc20d2f0f345ef"} Dec 13 07:00:39 crc kubenswrapper[4644]: I1213 07:00:39.754309 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:00:39 crc kubenswrapper[4644]: I1213 07:00:39.754349 4644 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:00:39 crc kubenswrapper[4644]: I1213 07:00:39.884131 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:40 crc kubenswrapper[4644]: I1213 07:00:40.396234 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47f5ebb6-8937-441f-9c9f-a1e31c401eed" path="/var/lib/kubelet/pods/47f5ebb6-8937-441f-9c9f-a1e31c401eed/volumes" Dec 13 07:00:40 crc kubenswrapper[4644]: I1213 07:00:40.663619 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e8e96f9f-4c53-4df2-b818-c3341709594f","Type":"ContainerStarted","Data":"26ddbaab3563b720daf7b368360b7c46543ce4bf7eee532eccf911bc51f0b24a"} Dec 13 07:00:40 crc kubenswrapper[4644]: I1213 07:00:40.665245 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8a194627-4be2-4f29-ab01-c84d89070739","Type":"ContainerStarted","Data":"42a6aab88788f2b1aa6f92f9131580039149a4a708dd02baf465856afb93c5d6"} Dec 13 07:00:40 crc kubenswrapper[4644]: I1213 07:00:40.683350 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=14.227362235 podStartE2EDuration="23.683333655s" podCreationTimestamp="2025-12-13 07:00:17 +0000 UTC" firstStartedPulling="2025-12-13 07:00:25.435591098 +0000 UTC m=+887.650541931" lastFinishedPulling="2025-12-13 07:00:34.891562518 +0000 UTC m=+897.106513351" observedRunningTime="2025-12-13 07:00:40.67801783 +0000 UTC m=+902.892968663" watchObservedRunningTime="2025-12-13 07:00:40.683333655 +0000 UTC m=+902.898284489" Dec 13 07:00:40 crc kubenswrapper[4644]: I1213 07:00:40.692387 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 13 07:00:40 crc kubenswrapper[4644]: I1213 07:00:40.695897 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=16.913762264 podStartE2EDuration="25.695883943s" podCreationTimestamp="2025-12-13 07:00:15 +0000 UTC" firstStartedPulling="2025-12-13 07:00:25.507669453 +0000 UTC m=+887.722620287" lastFinishedPulling="2025-12-13 07:00:34.289791133 +0000 UTC m=+896.504741966" observedRunningTime="2025-12-13 07:00:40.692038774 +0000 UTC m=+902.906989617" watchObservedRunningTime="2025-12-13 07:00:40.695883943 +0000 UTC m=+902.910834776" Dec 13 07:00:40 crc kubenswrapper[4644]: I1213 07:00:40.697796 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 13 07:00:40 crc kubenswrapper[4644]: I1213 07:00:40.911060 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 13 07:00:40 crc kubenswrapper[4644]: E1213 07:00:40.911374 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47f5ebb6-8937-441f-9c9f-a1e31c401eed" containerName="init" Dec 13 07:00:40 crc kubenswrapper[4644]: I1213 07:00:40.911393 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="47f5ebb6-8937-441f-9c9f-a1e31c401eed" containerName="init" Dec 13 07:00:40 crc kubenswrapper[4644]: I1213 07:00:40.911602 4644 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="47f5ebb6-8937-441f-9c9f-a1e31c401eed" containerName="init" Dec 13 07:00:40 crc kubenswrapper[4644]: I1213 07:00:40.912341 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 13 07:00:40 crc kubenswrapper[4644]: I1213 07:00:40.914823 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 13 07:00:40 crc kubenswrapper[4644]: I1213 07:00:40.915202 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 13 07:00:40 crc kubenswrapper[4644]: I1213 07:00:40.915332 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-4qh2d" Dec 13 07:00:40 crc kubenswrapper[4644]: I1213 07:00:40.915465 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 13 07:00:40 crc kubenswrapper[4644]: I1213 07:00:40.952580 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.066481 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.069981 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/200e0c3e-a409-4cad-8ed3-de1f4f209091-scripts\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.070207 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/200e0c3e-a409-4cad-8ed3-de1f4f209091-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.070356 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbnrz\" (UniqueName: \"kubernetes.io/projected/200e0c3e-a409-4cad-8ed3-de1f4f209091-kube-api-access-xbnrz\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.070491 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/200e0c3e-a409-4cad-8ed3-de1f4f209091-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.070617 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/200e0c3e-a409-4cad-8ed3-de1f4f209091-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.070739 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/200e0c3e-a409-4cad-8ed3-de1f4f209091-config\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.070848 4644 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/200e0c3e-a409-4cad-8ed3-de1f4f209091-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.172835 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/200e0c3e-a409-4cad-8ed3-de1f4f209091-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.172922 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/200e0c3e-a409-4cad-8ed3-de1f4f209091-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.172961 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/200e0c3e-a409-4cad-8ed3-de1f4f209091-config\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.172983 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/200e0c3e-a409-4cad-8ed3-de1f4f209091-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.173462 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/200e0c3e-a409-4cad-8ed3-de1f4f209091-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.173886 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/200e0c3e-a409-4cad-8ed3-de1f4f209091-config\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.174106 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/200e0c3e-a409-4cad-8ed3-de1f4f209091-scripts\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.174217 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/200e0c3e-a409-4cad-8ed3-de1f4f209091-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.174242 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbnrz\" (UniqueName: \"kubernetes.io/projected/200e0c3e-a409-4cad-8ed3-de1f4f209091-kube-api-access-xbnrz\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 
07:00:41.174801 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/200e0c3e-a409-4cad-8ed3-de1f4f209091-scripts\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.181123 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/200e0c3e-a409-4cad-8ed3-de1f4f209091-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.182938 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/200e0c3e-a409-4cad-8ed3-de1f4f209091-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.183054 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/200e0c3e-a409-4cad-8ed3-de1f4f209091-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.189090 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbnrz\" (UniqueName: \"kubernetes.io/projected/200e0c3e-a409-4cad-8ed3-de1f4f209091-kube-api-access-xbnrz\") pod \"ovn-northd-0\" (UID: \"200e0c3e-a409-4cad-8ed3-de1f4f209091\") " pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.229237 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.600466 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 13 07:00:41 crc kubenswrapper[4644]: I1213 07:00:41.672158 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"200e0c3e-a409-4cad-8ed3-de1f4f209091","Type":"ContainerStarted","Data":"cdeea6bd6f10e74836e4f2e82ef545be183d80a8437a2a1090b83af7f769149b"} Dec 13 07:00:43 crc kubenswrapper[4644]: I1213 07:00:43.684913 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"200e0c3e-a409-4cad-8ed3-de1f4f209091","Type":"ContainerStarted","Data":"8976d4907bacb34015202272c7fd136f4482c7acb2b589daabbf259a658ff353"} Dec 13 07:00:43 crc kubenswrapper[4644]: I1213 07:00:43.685491 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"200e0c3e-a409-4cad-8ed3-de1f4f209091","Type":"ContainerStarted","Data":"edcb5ac15807506d99a303b81b8c3495d140f6b5591712eac100985206143b1b"} Dec 13 07:00:43 crc kubenswrapper[4644]: I1213 07:00:43.685539 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 13 07:00:43 crc kubenswrapper[4644]: I1213 07:00:43.705371 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.318572071 podStartE2EDuration="3.70535125s" podCreationTimestamp="2025-12-13 07:00:40 +0000 UTC" firstStartedPulling="2025-12-13 07:00:41.606784399 +0000 UTC m=+903.821735231" lastFinishedPulling="2025-12-13 07:00:42.993563578 +0000 UTC m=+905.208514410" observedRunningTime="2025-12-13 07:00:43.701102632 +0000 UTC m=+905.916053465" watchObservedRunningTime="2025-12-13 07:00:43.70535125 +0000 UTC m=+905.920302083" Dec 13 07:00:43 crc kubenswrapper[4644]: I1213 07:00:43.874428 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 13 07:00:46 crc kubenswrapper[4644]: I1213 07:00:46.635622 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:00:46 crc kubenswrapper[4644]: I1213 07:00:46.682638 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-6gkc2"] Dec 13 07:00:46 crc kubenswrapper[4644]: I1213 07:00:46.682845 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" podUID="a6331da5-ec97-4e23-8a5d-a9977be81c72" containerName="dnsmasq-dns" containerID="cri-o://c1567e4bf3fa32b226008aad08431f9ee08b4755e9c01916863331864ee65ffe" gracePeriod=10 Dec 13 07:00:47 crc kubenswrapper[4644]: I1213 07:00:47.125683 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 13 07:00:47 crc kubenswrapper[4644]: I1213 07:00:47.126580 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 13 07:00:47 crc kubenswrapper[4644]: I1213 07:00:47.194319 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 13 07:00:47 crc kubenswrapper[4644]: E1213 07:00:47.701246 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
Dec 13 07:00:47 crc kubenswrapper[4644]: E1213 07:00:47.701246 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47f5ebb6_8937_441f_9c9f_a1e31c401eed.slice/crio-b8fc48432f72b692739947981613bbb69944b32e6c437e9a01d15154f417b1b5\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47f5ebb6_8937_441f_9c9f_a1e31c401eed.slice\": RecentStats: unable to find data in memory cache]"
Dec 13 07:00:47 crc kubenswrapper[4644]: I1213 07:00:47.712551 4644 generic.go:334] "Generic (PLEG): container finished" podID="a6331da5-ec97-4e23-8a5d-a9977be81c72" containerID="c1567e4bf3fa32b226008aad08431f9ee08b4755e9c01916863331864ee65ffe" exitCode=0
Dec 13 07:00:47 crc kubenswrapper[4644]: I1213 07:00:47.712640 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" event={"ID":"a6331da5-ec97-4e23-8a5d-a9977be81c72","Type":"ContainerDied","Data":"c1567e4bf3fa32b226008aad08431f9ee08b4755e9c01916863331864ee65ffe"}
Dec 13 07:00:47 crc kubenswrapper[4644]: I1213 07:00:47.773228 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.518099 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-f342-account-create-update-nlr8s"]
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.520810 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-f342-account-create-update-nlr8s"
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.522837 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.535102 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-f342-account-create-update-nlr8s"]
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.544413 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.544465 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.573862 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-5m9w2"]
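[editor's note] The cadvisor "partial failures" entry above, like the later "Failed to process watch event ... Status 404" warnings, identifies pods only by systemd cgroup slice, with the pod UID's dashes rewritten as underscores: kubepods-besteffort-pod47f5ebb6_8937_441f_9c9f_a1e31c401eed.slice is pod UID 47f5ebb6-8937-441f-9c9f-a1e31c401eed, and the crio-<id> component is the container ID. Below is a hypothetical helper (names CGROUP_RE and parse_cgroup are ours) for mapping such paths back, assuming only the layout visible in this log: burstable/besteffort slices with optional crio scopes; guaranteed-QoS pods, which sit directly under kubepods.slice, are deliberately not handled.

    import re

    # Matches the slice/scope names that appear in this log's error messages.
    CGROUP_RE = re.compile(
        r"kubepods-(?P<qos>burstable|besteffort)-pod(?P<uid>[0-9a-f_]+)\.slice"
        r"(?:/crio-(?P<cid>[0-9a-f]+))?"
    )

    def parse_cgroup(path):
        m = CGROUP_RE.search(path)
        if m is None:
            return None
        # The kubelet encodes the pod UID with "_" in place of "-".
        return m.group("qos"), m.group("uid").replace("_", "-"), m.group("cid")

    print(parse_cgroup(
        "/kubepods.slice/kubepods-besteffort.slice/"
        "kubepods-besteffort-pod47f5ebb6_8937_441f_9c9f_a1e31c401eed.slice/"
        "crio-b8fc48432f72b692739947981613bbb69944b32e6c437e9a01d15154f417b1b5"
    ))
    # -> ('besteffort', '47f5ebb6-8937-441f-9c9f-a1e31c401eed', 'b8fc4843...17b1b5')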
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.575149 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-5m9w2"
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.581540 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-5m9w2"]
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.600480 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76fa9df7-1c14-4734-94ba-8dc4d4b64ad3-operator-scripts\") pod \"keystone-f342-account-create-update-nlr8s\" (UID: \"76fa9df7-1c14-4734-94ba-8dc4d4b64ad3\") " pod="openstack/keystone-f342-account-create-update-nlr8s"
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.600533 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6dpn\" (UniqueName: \"kubernetes.io/projected/76fa9df7-1c14-4734-94ba-8dc4d4b64ad3-kube-api-access-b6dpn\") pod \"keystone-f342-account-create-update-nlr8s\" (UID: \"76fa9df7-1c14-4734-94ba-8dc4d4b64ad3\") " pod="openstack/keystone-f342-account-create-update-nlr8s"
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.620971 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.701617 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/426e7525-babf-4c8e-aa46-e8bfa39968f1-operator-scripts\") pod \"keystone-db-create-5m9w2\" (UID: \"426e7525-babf-4c8e-aa46-e8bfa39968f1\") " pod="openstack/keystone-db-create-5m9w2"
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.701784 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76fa9df7-1c14-4734-94ba-8dc4d4b64ad3-operator-scripts\") pod \"keystone-f342-account-create-update-nlr8s\" (UID: \"76fa9df7-1c14-4734-94ba-8dc4d4b64ad3\") " pod="openstack/keystone-f342-account-create-update-nlr8s"
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.701810 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6dpn\" (UniqueName: \"kubernetes.io/projected/76fa9df7-1c14-4734-94ba-8dc4d4b64ad3-kube-api-access-b6dpn\") pod \"keystone-f342-account-create-update-nlr8s\" (UID: \"76fa9df7-1c14-4734-94ba-8dc4d4b64ad3\") " pod="openstack/keystone-f342-account-create-update-nlr8s"
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.701837 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cb6v4\" (UniqueName: \"kubernetes.io/projected/426e7525-babf-4c8e-aa46-e8bfa39968f1-kube-api-access-cb6v4\") pod \"keystone-db-create-5m9w2\" (UID: \"426e7525-babf-4c8e-aa46-e8bfa39968f1\") " pod="openstack/keystone-db-create-5m9w2"
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.702905 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76fa9df7-1c14-4734-94ba-8dc4d4b64ad3-operator-scripts\") pod \"keystone-f342-account-create-update-nlr8s\" (UID: \"76fa9df7-1c14-4734-94ba-8dc4d4b64ad3\") " pod="openstack/keystone-f342-account-create-update-nlr8s"
Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.732515 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6dpn\" (UniqueName: 
\"kubernetes.io/projected/76fa9df7-1c14-4734-94ba-8dc4d4b64ad3-kube-api-access-b6dpn\") pod \"keystone-f342-account-create-update-nlr8s\" (UID: \"76fa9df7-1c14-4734-94ba-8dc4d4b64ad3\") " pod="openstack/keystone-f342-account-create-update-nlr8s" Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.770662 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-xfcdr"] Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.771764 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-xfcdr" Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.783194 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-xfcdr"] Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.806981 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/426e7525-babf-4c8e-aa46-e8bfa39968f1-operator-scripts\") pod \"keystone-db-create-5m9w2\" (UID: \"426e7525-babf-4c8e-aa46-e8bfa39968f1\") " pod="openstack/keystone-db-create-5m9w2" Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.807819 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/426e7525-babf-4c8e-aa46-e8bfa39968f1-operator-scripts\") pod \"keystone-db-create-5m9w2\" (UID: \"426e7525-babf-4c8e-aa46-e8bfa39968f1\") " pod="openstack/keystone-db-create-5m9w2" Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.807372 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cb6v4\" (UniqueName: \"kubernetes.io/projected/426e7525-babf-4c8e-aa46-e8bfa39968f1-kube-api-access-cb6v4\") pod \"keystone-db-create-5m9w2\" (UID: \"426e7525-babf-4c8e-aa46-e8bfa39968f1\") " pod="openstack/keystone-db-create-5m9w2" Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.831668 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cb6v4\" (UniqueName: \"kubernetes.io/projected/426e7525-babf-4c8e-aa46-e8bfa39968f1-kube-api-access-cb6v4\") pod \"keystone-db-create-5m9w2\" (UID: \"426e7525-babf-4c8e-aa46-e8bfa39968f1\") " pod="openstack/keystone-db-create-5m9w2" Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.835407 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-f342-account-create-update-nlr8s" Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.839564 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.848486 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-c77f-account-create-update-8jqkn"] Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.849596 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c77f-account-create-update-8jqkn" Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.855249 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.875638 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-c77f-account-create-update-8jqkn"] Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.902963 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-5m9w2" Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.918367 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bb2lf\" (UniqueName: \"kubernetes.io/projected/17497e45-c9ee-486a-a743-651b0447f79d-kube-api-access-bb2lf\") pod \"placement-db-create-xfcdr\" (UID: \"17497e45-c9ee-486a-a743-651b0447f79d\") " pod="openstack/placement-db-create-xfcdr" Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.918428 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xvpg\" (UniqueName: \"kubernetes.io/projected/7c1b1c7c-3447-400f-a82e-0d6a130b815f-kube-api-access-2xvpg\") pod \"placement-c77f-account-create-update-8jqkn\" (UID: \"7c1b1c7c-3447-400f-a82e-0d6a130b815f\") " pod="openstack/placement-c77f-account-create-update-8jqkn" Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.918600 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c1b1c7c-3447-400f-a82e-0d6a130b815f-operator-scripts\") pod \"placement-c77f-account-create-update-8jqkn\" (UID: \"7c1b1c7c-3447-400f-a82e-0d6a130b815f\") " pod="openstack/placement-c77f-account-create-update-8jqkn" Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.918752 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17497e45-c9ee-486a-a743-651b0447f79d-operator-scripts\") pod \"placement-db-create-xfcdr\" (UID: \"17497e45-c9ee-486a-a743-651b0447f79d\") " pod="openstack/placement-db-create-xfcdr" Dec 13 07:00:48 crc kubenswrapper[4644]: I1213 07:00:48.999058 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.027206 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bb2lf\" (UniqueName: \"kubernetes.io/projected/17497e45-c9ee-486a-a743-651b0447f79d-kube-api-access-bb2lf\") pod \"placement-db-create-xfcdr\" (UID: \"17497e45-c9ee-486a-a743-651b0447f79d\") " pod="openstack/placement-db-create-xfcdr" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.027265 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xvpg\" (UniqueName: \"kubernetes.io/projected/7c1b1c7c-3447-400f-a82e-0d6a130b815f-kube-api-access-2xvpg\") pod \"placement-c77f-account-create-update-8jqkn\" (UID: \"7c1b1c7c-3447-400f-a82e-0d6a130b815f\") " pod="openstack/placement-c77f-account-create-update-8jqkn" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.027416 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c1b1c7c-3447-400f-a82e-0d6a130b815f-operator-scripts\") pod \"placement-c77f-account-create-update-8jqkn\" (UID: \"7c1b1c7c-3447-400f-a82e-0d6a130b815f\") " pod="openstack/placement-c77f-account-create-update-8jqkn" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.028072 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17497e45-c9ee-486a-a743-651b0447f79d-operator-scripts\") pod \"placement-db-create-xfcdr\" (UID: \"17497e45-c9ee-486a-a743-651b0447f79d\") " pod="openstack/placement-db-create-xfcdr" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.032833 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c1b1c7c-3447-400f-a82e-0d6a130b815f-operator-scripts\") pod \"placement-c77f-account-create-update-8jqkn\" (UID: \"7c1b1c7c-3447-400f-a82e-0d6a130b815f\") " pod="openstack/placement-c77f-account-create-update-8jqkn" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.032841 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17497e45-c9ee-486a-a743-651b0447f79d-operator-scripts\") pod \"placement-db-create-xfcdr\" (UID: \"17497e45-c9ee-486a-a743-651b0447f79d\") " pod="openstack/placement-db-create-xfcdr" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.047101 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xvpg\" (UniqueName: \"kubernetes.io/projected/7c1b1c7c-3447-400f-a82e-0d6a130b815f-kube-api-access-2xvpg\") pod \"placement-c77f-account-create-update-8jqkn\" (UID: \"7c1b1c7c-3447-400f-a82e-0d6a130b815f\") " pod="openstack/placement-c77f-account-create-update-8jqkn" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.047187 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bb2lf\" (UniqueName: \"kubernetes.io/projected/17497e45-c9ee-486a-a743-651b0447f79d-kube-api-access-bb2lf\") pod \"placement-db-create-xfcdr\" (UID: \"17497e45-c9ee-486a-a743-651b0447f79d\") " pod="openstack/placement-db-create-xfcdr" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.087065 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-xfcdr" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.129649 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zk9rj\" (UniqueName: \"kubernetes.io/projected/a6331da5-ec97-4e23-8a5d-a9977be81c72-kube-api-access-zk9rj\") pod \"a6331da5-ec97-4e23-8a5d-a9977be81c72\" (UID: \"a6331da5-ec97-4e23-8a5d-a9977be81c72\") " Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.129705 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6331da5-ec97-4e23-8a5d-a9977be81c72-config\") pod \"a6331da5-ec97-4e23-8a5d-a9977be81c72\" (UID: \"a6331da5-ec97-4e23-8a5d-a9977be81c72\") " Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.129751 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6331da5-ec97-4e23-8a5d-a9977be81c72-dns-svc\") pod \"a6331da5-ec97-4e23-8a5d-a9977be81c72\" (UID: \"a6331da5-ec97-4e23-8a5d-a9977be81c72\") " Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.133334 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6331da5-ec97-4e23-8a5d-a9977be81c72-kube-api-access-zk9rj" (OuterVolumeSpecName: "kube-api-access-zk9rj") pod "a6331da5-ec97-4e23-8a5d-a9977be81c72" (UID: "a6331da5-ec97-4e23-8a5d-a9977be81c72"). InnerVolumeSpecName "kube-api-access-zk9rj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.168974 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6331da5-ec97-4e23-8a5d-a9977be81c72-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a6331da5-ec97-4e23-8a5d-a9977be81c72" (UID: "a6331da5-ec97-4e23-8a5d-a9977be81c72"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.170052 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6331da5-ec97-4e23-8a5d-a9977be81c72-config" (OuterVolumeSpecName: "config") pod "a6331da5-ec97-4e23-8a5d-a9977be81c72" (UID: "a6331da5-ec97-4e23-8a5d-a9977be81c72"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.231224 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zk9rj\" (UniqueName: \"kubernetes.io/projected/a6331da5-ec97-4e23-8a5d-a9977be81c72-kube-api-access-zk9rj\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.231260 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6331da5-ec97-4e23-8a5d-a9977be81c72-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.231270 4644 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a6331da5-ec97-4e23-8a5d-a9977be81c72-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.233769 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-c77f-account-create-update-8jqkn" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.333278 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-f342-account-create-update-nlr8s"] Dec 13 07:00:49 crc kubenswrapper[4644]: W1213 07:00:49.337310 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76fa9df7_1c14_4734_94ba_8dc4d4b64ad3.slice/crio-575cb8dbc57092b694cee7674bb4023b78f07dd60771b4f5e1631160f7490db4 WatchSource:0}: Error finding container 575cb8dbc57092b694cee7674bb4023b78f07dd60771b4f5e1631160f7490db4: Status 404 returned error can't find the container with id 575cb8dbc57092b694cee7674bb4023b78f07dd60771b4f5e1631160f7490db4 Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.422566 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-5m9w2"] Dec 13 07:00:49 crc kubenswrapper[4644]: W1213 07:00:49.431585 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod426e7525_babf_4c8e_aa46_e8bfa39968f1.slice/crio-9750f72005d7896f5596bce03d4d643e0add904b4d6639ebf0be8d4782ef1630 WatchSource:0}: Error finding container 9750f72005d7896f5596bce03d4d643e0add904b4d6639ebf0be8d4782ef1630: Status 404 returned error can't find the container with id 9750f72005d7896f5596bce03d4d643e0add904b4d6639ebf0be8d4782ef1630 Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.481175 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-xfcdr"] Dec 13 07:00:49 crc kubenswrapper[4644]: W1213 07:00:49.487196 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17497e45_c9ee_486a_a743_651b0447f79d.slice/crio-ced3838c608e06b813dc9a5de2b9857139e2d43a391952cfca56bab302077c92 WatchSource:0}: Error finding container ced3838c608e06b813dc9a5de2b9857139e2d43a391952cfca56bab302077c92: Status 404 returned error can't find the container with id ced3838c608e06b813dc9a5de2b9857139e2d43a391952cfca56bab302077c92 Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.613857 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-c77f-account-create-update-8jqkn"] Dec 13 07:00:49 crc kubenswrapper[4644]: W1213 07:00:49.687662 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c1b1c7c_3447_400f_a82e_0d6a130b815f.slice/crio-207ce72fbf5901656628c83aa8f3ea7e76753b9c7f209d095e98a077ba74f039 WatchSource:0}: Error finding container 207ce72fbf5901656628c83aa8f3ea7e76753b9c7f209d095e98a077ba74f039: Status 404 returned error can't find the container with id 207ce72fbf5901656628c83aa8f3ea7e76753b9c7f209d095e98a077ba74f039 Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.728402 4644 generic.go:334] "Generic (PLEG): container finished" podID="76fa9df7-1c14-4734-94ba-8dc4d4b64ad3" containerID="ecba3c6050340d8a7cca5e5586e13bcd67e498738dfbc0e2ddd1b1c804a60713" exitCode=0 Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.728521 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f342-account-create-update-nlr8s" event={"ID":"76fa9df7-1c14-4734-94ba-8dc4d4b64ad3","Type":"ContainerDied","Data":"ecba3c6050340d8a7cca5e5586e13bcd67e498738dfbc0e2ddd1b1c804a60713"} Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 
07:00:49.728559 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f342-account-create-update-nlr8s" event={"ID":"76fa9df7-1c14-4734-94ba-8dc4d4b64ad3","Type":"ContainerStarted","Data":"575cb8dbc57092b694cee7674bb4023b78f07dd60771b4f5e1631160f7490db4"} Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.731917 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-xfcdr" event={"ID":"17497e45-c9ee-486a-a743-651b0447f79d","Type":"ContainerStarted","Data":"636f26a89a93e532cd2f574d0decf187977b0fad55f75b61415d39103173495f"} Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.731950 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-xfcdr" event={"ID":"17497e45-c9ee-486a-a743-651b0447f79d","Type":"ContainerStarted","Data":"ced3838c608e06b813dc9a5de2b9857139e2d43a391952cfca56bab302077c92"} Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.733327 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c77f-account-create-update-8jqkn" event={"ID":"7c1b1c7c-3447-400f-a82e-0d6a130b815f","Type":"ContainerStarted","Data":"207ce72fbf5901656628c83aa8f3ea7e76753b9c7f209d095e98a077ba74f039"} Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.734293 4644 generic.go:334] "Generic (PLEG): container finished" podID="426e7525-babf-4c8e-aa46-e8bfa39968f1" containerID="a99b83b050cf98c6bdba04caf580b9eeb507e999adf66935482e96eb3f007008" exitCode=0 Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.734339 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5m9w2" event={"ID":"426e7525-babf-4c8e-aa46-e8bfa39968f1","Type":"ContainerDied","Data":"a99b83b050cf98c6bdba04caf580b9eeb507e999adf66935482e96eb3f007008"} Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.734407 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5m9w2" event={"ID":"426e7525-babf-4c8e-aa46-e8bfa39968f1","Type":"ContainerStarted","Data":"9750f72005d7896f5596bce03d4d643e0add904b4d6639ebf0be8d4782ef1630"} Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.735951 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.736002 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-6gkc2" event={"ID":"a6331da5-ec97-4e23-8a5d-a9977be81c72","Type":"ContainerDied","Data":"356b821ab36cfae4edadc74d3813bdd04d32ef8e1501cb96b900f1d41cfa1076"} Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.736162 4644 scope.go:117] "RemoveContainer" containerID="c1567e4bf3fa32b226008aad08431f9ee08b4755e9c01916863331864ee65ffe" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.766556 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-xfcdr" podStartSLOduration=1.766538389 podStartE2EDuration="1.766538389s" podCreationTimestamp="2025-12-13 07:00:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:00:49.764773902 +0000 UTC m=+911.979724734" watchObservedRunningTime="2025-12-13 07:00:49.766538389 +0000 UTC m=+911.981489222" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.846336 4644 scope.go:117] "RemoveContainer" containerID="c87413c12414e7a737c8e69c610b761f48e5d3c7d88ec6b6118092959e09b26b" Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.874701 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-6gkc2"] Dec 13 07:00:49 crc kubenswrapper[4644]: I1213 07:00:49.880925 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-6gkc2"] Dec 13 07:00:50 crc kubenswrapper[4644]: I1213 07:00:50.397594 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6331da5-ec97-4e23-8a5d-a9977be81c72" path="/var/lib/kubelet/pods/a6331da5-ec97-4e23-8a5d-a9977be81c72/volumes" Dec 13 07:00:50 crc kubenswrapper[4644]: I1213 07:00:50.745789 4644 generic.go:334] "Generic (PLEG): container finished" podID="17497e45-c9ee-486a-a743-651b0447f79d" containerID="636f26a89a93e532cd2f574d0decf187977b0fad55f75b61415d39103173495f" exitCode=0 Dec 13 07:00:50 crc kubenswrapper[4644]: I1213 07:00:50.745862 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-xfcdr" event={"ID":"17497e45-c9ee-486a-a743-651b0447f79d","Type":"ContainerDied","Data":"636f26a89a93e532cd2f574d0decf187977b0fad55f75b61415d39103173495f"} Dec 13 07:00:50 crc kubenswrapper[4644]: I1213 07:00:50.747335 4644 generic.go:334] "Generic (PLEG): container finished" podID="7c1b1c7c-3447-400f-a82e-0d6a130b815f" containerID="e89f03a1aadee20bd362b68848db4133a0c0e31d8b7676e914264157d2ff912f" exitCode=0 Dec 13 07:00:50 crc kubenswrapper[4644]: I1213 07:00:50.747404 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c77f-account-create-update-8jqkn" event={"ID":"7c1b1c7c-3447-400f-a82e-0d6a130b815f","Type":"ContainerDied","Data":"e89f03a1aadee20bd362b68848db4133a0c0e31d8b7676e914264157d2ff912f"} Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.102502 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-f342-account-create-update-nlr8s" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.107883 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-5m9w2" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.171336 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cb6v4\" (UniqueName: \"kubernetes.io/projected/426e7525-babf-4c8e-aa46-e8bfa39968f1-kube-api-access-cb6v4\") pod \"426e7525-babf-4c8e-aa46-e8bfa39968f1\" (UID: \"426e7525-babf-4c8e-aa46-e8bfa39968f1\") " Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.171427 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/426e7525-babf-4c8e-aa46-e8bfa39968f1-operator-scripts\") pod \"426e7525-babf-4c8e-aa46-e8bfa39968f1\" (UID: \"426e7525-babf-4c8e-aa46-e8bfa39968f1\") " Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.171524 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76fa9df7-1c14-4734-94ba-8dc4d4b64ad3-operator-scripts\") pod \"76fa9df7-1c14-4734-94ba-8dc4d4b64ad3\" (UID: \"76fa9df7-1c14-4734-94ba-8dc4d4b64ad3\") " Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.171602 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6dpn\" (UniqueName: \"kubernetes.io/projected/76fa9df7-1c14-4734-94ba-8dc4d4b64ad3-kube-api-access-b6dpn\") pod \"76fa9df7-1c14-4734-94ba-8dc4d4b64ad3\" (UID: \"76fa9df7-1c14-4734-94ba-8dc4d4b64ad3\") " Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.172246 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/426e7525-babf-4c8e-aa46-e8bfa39968f1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "426e7525-babf-4c8e-aa46-e8bfa39968f1" (UID: "426e7525-babf-4c8e-aa46-e8bfa39968f1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.172540 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/426e7525-babf-4c8e-aa46-e8bfa39968f1-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.173038 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76fa9df7-1c14-4734-94ba-8dc4d4b64ad3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "76fa9df7-1c14-4734-94ba-8dc4d4b64ad3" (UID: "76fa9df7-1c14-4734-94ba-8dc4d4b64ad3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.177264 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/426e7525-babf-4c8e-aa46-e8bfa39968f1-kube-api-access-cb6v4" (OuterVolumeSpecName: "kube-api-access-cb6v4") pod "426e7525-babf-4c8e-aa46-e8bfa39968f1" (UID: "426e7525-babf-4c8e-aa46-e8bfa39968f1"). InnerVolumeSpecName "kube-api-access-cb6v4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.177789 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76fa9df7-1c14-4734-94ba-8dc4d4b64ad3-kube-api-access-b6dpn" (OuterVolumeSpecName: "kube-api-access-b6dpn") pod "76fa9df7-1c14-4734-94ba-8dc4d4b64ad3" (UID: "76fa9df7-1c14-4734-94ba-8dc4d4b64ad3"). 
InnerVolumeSpecName "kube-api-access-b6dpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.274436 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76fa9df7-1c14-4734-94ba-8dc4d4b64ad3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.274482 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6dpn\" (UniqueName: \"kubernetes.io/projected/76fa9df7-1c14-4734-94ba-8dc4d4b64ad3-kube-api-access-b6dpn\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.274494 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cb6v4\" (UniqueName: \"kubernetes.io/projected/426e7525-babf-4c8e-aa46-e8bfa39968f1-kube-api-access-cb6v4\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.599935 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7cnxw"] Dec 13 07:00:51 crc kubenswrapper[4644]: E1213 07:00:51.600234 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6331da5-ec97-4e23-8a5d-a9977be81c72" containerName="init" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.600248 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6331da5-ec97-4e23-8a5d-a9977be81c72" containerName="init" Dec 13 07:00:51 crc kubenswrapper[4644]: E1213 07:00:51.600267 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6331da5-ec97-4e23-8a5d-a9977be81c72" containerName="dnsmasq-dns" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.600274 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6331da5-ec97-4e23-8a5d-a9977be81c72" containerName="dnsmasq-dns" Dec 13 07:00:51 crc kubenswrapper[4644]: E1213 07:00:51.600294 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76fa9df7-1c14-4734-94ba-8dc4d4b64ad3" containerName="mariadb-account-create-update" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.600301 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="76fa9df7-1c14-4734-94ba-8dc4d4b64ad3" containerName="mariadb-account-create-update" Dec 13 07:00:51 crc kubenswrapper[4644]: E1213 07:00:51.600338 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="426e7525-babf-4c8e-aa46-e8bfa39968f1" containerName="mariadb-database-create" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.600343 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="426e7525-babf-4c8e-aa46-e8bfa39968f1" containerName="mariadb-database-create" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.600519 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6331da5-ec97-4e23-8a5d-a9977be81c72" containerName="dnsmasq-dns" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.600535 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="76fa9df7-1c14-4734-94ba-8dc4d4b64ad3" containerName="mariadb-account-create-update" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.600546 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="426e7525-babf-4c8e-aa46-e8bfa39968f1" containerName="mariadb-database-create" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.601588 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7cnxw" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.611807 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7cnxw"] Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.681281 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ec589a2-9516-4432-90ad-9799fee56ee7-catalog-content\") pod \"redhat-marketplace-7cnxw\" (UID: \"1ec589a2-9516-4432-90ad-9799fee56ee7\") " pod="openshift-marketplace/redhat-marketplace-7cnxw" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.681387 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2c8z2\" (UniqueName: \"kubernetes.io/projected/1ec589a2-9516-4432-90ad-9799fee56ee7-kube-api-access-2c8z2\") pod \"redhat-marketplace-7cnxw\" (UID: \"1ec589a2-9516-4432-90ad-9799fee56ee7\") " pod="openshift-marketplace/redhat-marketplace-7cnxw" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.681502 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ec589a2-9516-4432-90ad-9799fee56ee7-utilities\") pod \"redhat-marketplace-7cnxw\" (UID: \"1ec589a2-9516-4432-90ad-9799fee56ee7\") " pod="openshift-marketplace/redhat-marketplace-7cnxw" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.754729 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f342-account-create-update-nlr8s" event={"ID":"76fa9df7-1c14-4734-94ba-8dc4d4b64ad3","Type":"ContainerDied","Data":"575cb8dbc57092b694cee7674bb4023b78f07dd60771b4f5e1631160f7490db4"} Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.754788 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="575cb8dbc57092b694cee7674bb4023b78f07dd60771b4f5e1631160f7490db4" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.754755 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-f342-account-create-update-nlr8s" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.756401 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5m9w2" event={"ID":"426e7525-babf-4c8e-aa46-e8bfa39968f1","Type":"ContainerDied","Data":"9750f72005d7896f5596bce03d4d643e0add904b4d6639ebf0be8d4782ef1630"} Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.756433 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9750f72005d7896f5596bce03d4d643e0add904b4d6639ebf0be8d4782ef1630" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.756658 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-5m9w2" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.783024 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2c8z2\" (UniqueName: \"kubernetes.io/projected/1ec589a2-9516-4432-90ad-9799fee56ee7-kube-api-access-2c8z2\") pod \"redhat-marketplace-7cnxw\" (UID: \"1ec589a2-9516-4432-90ad-9799fee56ee7\") " pod="openshift-marketplace/redhat-marketplace-7cnxw" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.783128 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ec589a2-9516-4432-90ad-9799fee56ee7-utilities\") pod \"redhat-marketplace-7cnxw\" (UID: \"1ec589a2-9516-4432-90ad-9799fee56ee7\") " pod="openshift-marketplace/redhat-marketplace-7cnxw" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.783277 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ec589a2-9516-4432-90ad-9799fee56ee7-catalog-content\") pod \"redhat-marketplace-7cnxw\" (UID: \"1ec589a2-9516-4432-90ad-9799fee56ee7\") " pod="openshift-marketplace/redhat-marketplace-7cnxw" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.783767 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ec589a2-9516-4432-90ad-9799fee56ee7-utilities\") pod \"redhat-marketplace-7cnxw\" (UID: \"1ec589a2-9516-4432-90ad-9799fee56ee7\") " pod="openshift-marketplace/redhat-marketplace-7cnxw" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.783834 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ec589a2-9516-4432-90ad-9799fee56ee7-catalog-content\") pod \"redhat-marketplace-7cnxw\" (UID: \"1ec589a2-9516-4432-90ad-9799fee56ee7\") " pod="openshift-marketplace/redhat-marketplace-7cnxw" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.801268 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2c8z2\" (UniqueName: \"kubernetes.io/projected/1ec589a2-9516-4432-90ad-9799fee56ee7-kube-api-access-2c8z2\") pod \"redhat-marketplace-7cnxw\" (UID: \"1ec589a2-9516-4432-90ad-9799fee56ee7\") " pod="openshift-marketplace/redhat-marketplace-7cnxw" Dec 13 07:00:51 crc kubenswrapper[4644]: I1213 07:00:51.916613 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7cnxw" Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.145509 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-xfcdr" Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.155668 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-c77f-account-create-update-8jqkn" Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.291990 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xvpg\" (UniqueName: \"kubernetes.io/projected/7c1b1c7c-3447-400f-a82e-0d6a130b815f-kube-api-access-2xvpg\") pod \"7c1b1c7c-3447-400f-a82e-0d6a130b815f\" (UID: \"7c1b1c7c-3447-400f-a82e-0d6a130b815f\") " Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.292280 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c1b1c7c-3447-400f-a82e-0d6a130b815f-operator-scripts\") pod \"7c1b1c7c-3447-400f-a82e-0d6a130b815f\" (UID: \"7c1b1c7c-3447-400f-a82e-0d6a130b815f\") " Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.292360 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17497e45-c9ee-486a-a743-651b0447f79d-operator-scripts\") pod \"17497e45-c9ee-486a-a743-651b0447f79d\" (UID: \"17497e45-c9ee-486a-a743-651b0447f79d\") " Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.292478 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bb2lf\" (UniqueName: \"kubernetes.io/projected/17497e45-c9ee-486a-a743-651b0447f79d-kube-api-access-bb2lf\") pod \"17497e45-c9ee-486a-a743-651b0447f79d\" (UID: \"17497e45-c9ee-486a-a743-651b0447f79d\") " Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.292809 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c1b1c7c-3447-400f-a82e-0d6a130b815f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7c1b1c7c-3447-400f-a82e-0d6a130b815f" (UID: "7c1b1c7c-3447-400f-a82e-0d6a130b815f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.292952 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17497e45-c9ee-486a-a743-651b0447f79d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "17497e45-c9ee-486a-a743-651b0447f79d" (UID: "17497e45-c9ee-486a-a743-651b0447f79d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.293017 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c1b1c7c-3447-400f-a82e-0d6a130b815f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.293032 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17497e45-c9ee-486a-a743-651b0447f79d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.297532 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17497e45-c9ee-486a-a743-651b0447f79d-kube-api-access-bb2lf" (OuterVolumeSpecName: "kube-api-access-bb2lf") pod "17497e45-c9ee-486a-a743-651b0447f79d" (UID: "17497e45-c9ee-486a-a743-651b0447f79d"). InnerVolumeSpecName "kube-api-access-bb2lf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.298095 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c1b1c7c-3447-400f-a82e-0d6a130b815f-kube-api-access-2xvpg" (OuterVolumeSpecName: "kube-api-access-2xvpg") pod "7c1b1c7c-3447-400f-a82e-0d6a130b815f" (UID: "7c1b1c7c-3447-400f-a82e-0d6a130b815f"). InnerVolumeSpecName "kube-api-access-2xvpg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.394821 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bb2lf\" (UniqueName: \"kubernetes.io/projected/17497e45-c9ee-486a-a743-651b0447f79d-kube-api-access-bb2lf\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.394854 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xvpg\" (UniqueName: \"kubernetes.io/projected/7c1b1c7c-3447-400f-a82e-0d6a130b815f-kube-api-access-2xvpg\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.397982 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7cnxw"] Dec 13 07:00:52 crc kubenswrapper[4644]: W1213 07:00:52.401799 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1ec589a2_9516_4432_90ad_9799fee56ee7.slice/crio-90dc3fbe1cc568f1e1d941b2eb33ae56575106bde1f3e09b82382146ff588cfb WatchSource:0}: Error finding container 90dc3fbe1cc568f1e1d941b2eb33ae56575106bde1f3e09b82382146ff588cfb: Status 404 returned error can't find the container with id 90dc3fbe1cc568f1e1d941b2eb33ae56575106bde1f3e09b82382146ff588cfb Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.776759 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-xfcdr" event={"ID":"17497e45-c9ee-486a-a743-651b0447f79d","Type":"ContainerDied","Data":"ced3838c608e06b813dc9a5de2b9857139e2d43a391952cfca56bab302077c92"} Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.777516 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ced3838c608e06b813dc9a5de2b9857139e2d43a391952cfca56bab302077c92" Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.776790 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-xfcdr" Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.778090 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c77f-account-create-update-8jqkn" event={"ID":"7c1b1c7c-3447-400f-a82e-0d6a130b815f","Type":"ContainerDied","Data":"207ce72fbf5901656628c83aa8f3ea7e76753b9c7f209d095e98a077ba74f039"} Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.778131 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="207ce72fbf5901656628c83aa8f3ea7e76753b9c7f209d095e98a077ba74f039" Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.778192 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-c77f-account-create-update-8jqkn" Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.780808 4644 generic.go:334] "Generic (PLEG): container finished" podID="1ec589a2-9516-4432-90ad-9799fee56ee7" containerID="fe343f3206e9b2c84e454f9ca600fb8a25cea61b10642a9987be86afb4a4f44e" exitCode=0 Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.780840 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7cnxw" event={"ID":"1ec589a2-9516-4432-90ad-9799fee56ee7","Type":"ContainerDied","Data":"fe343f3206e9b2c84e454f9ca600fb8a25cea61b10642a9987be86afb4a4f44e"} Dec 13 07:00:52 crc kubenswrapper[4644]: I1213 07:00:52.780859 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7cnxw" event={"ID":"1ec589a2-9516-4432-90ad-9799fee56ee7","Type":"ContainerStarted","Data":"90dc3fbe1cc568f1e1d941b2eb33ae56575106bde1f3e09b82382146ff588cfb"} Dec 13 07:00:53 crc kubenswrapper[4644]: I1213 07:00:53.787363 4644 generic.go:334] "Generic (PLEG): container finished" podID="1ec589a2-9516-4432-90ad-9799fee56ee7" containerID="3596decf08282b8bd73ca69c377029a88e876f7a40fb2b301c61b83601d2f5bd" exitCode=0 Dec 13 07:00:53 crc kubenswrapper[4644]: I1213 07:00:53.787451 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7cnxw" event={"ID":"1ec589a2-9516-4432-90ad-9799fee56ee7","Type":"ContainerDied","Data":"3596decf08282b8bd73ca69c377029a88e876f7a40fb2b301c61b83601d2f5bd"} Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.048110 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-wdksf"] Dec 13 07:00:54 crc kubenswrapper[4644]: E1213 07:00:54.048598 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c1b1c7c-3447-400f-a82e-0d6a130b815f" containerName="mariadb-account-create-update" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.048624 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c1b1c7c-3447-400f-a82e-0d6a130b815f" containerName="mariadb-account-create-update" Dec 13 07:00:54 crc kubenswrapper[4644]: E1213 07:00:54.048649 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17497e45-c9ee-486a-a743-651b0447f79d" containerName="mariadb-database-create" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.048656 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="17497e45-c9ee-486a-a743-651b0447f79d" containerName="mariadb-database-create" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.048881 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="17497e45-c9ee-486a-a743-651b0447f79d" containerName="mariadb-database-create" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.048922 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c1b1c7c-3447-400f-a82e-0d6a130b815f" containerName="mariadb-account-create-update" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.049534 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-wdksf" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.055773 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-wdksf"] Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.123075 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da31d5ac-6b38-41e4-a35f-8f627e375cc7-operator-scripts\") pod \"glance-db-create-wdksf\" (UID: \"da31d5ac-6b38-41e4-a35f-8f627e375cc7\") " pod="openstack/glance-db-create-wdksf" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.123341 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jb25t\" (UniqueName: \"kubernetes.io/projected/da31d5ac-6b38-41e4-a35f-8f627e375cc7-kube-api-access-jb25t\") pod \"glance-db-create-wdksf\" (UID: \"da31d5ac-6b38-41e4-a35f-8f627e375cc7\") " pod="openstack/glance-db-create-wdksf" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.148574 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-651a-account-create-update-v45dv"] Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.149586 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-651a-account-create-update-v45dv" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.151016 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.165093 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-651a-account-create-update-v45dv"] Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.225778 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f169b2c3-efe1-46c6-bfdb-7a30052dbc20-operator-scripts\") pod \"glance-651a-account-create-update-v45dv\" (UID: \"f169b2c3-efe1-46c6-bfdb-7a30052dbc20\") " pod="openstack/glance-651a-account-create-update-v45dv" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.225900 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da31d5ac-6b38-41e4-a35f-8f627e375cc7-operator-scripts\") pod \"glance-db-create-wdksf\" (UID: \"da31d5ac-6b38-41e4-a35f-8f627e375cc7\") " pod="openstack/glance-db-create-wdksf" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.225927 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jb25t\" (UniqueName: \"kubernetes.io/projected/da31d5ac-6b38-41e4-a35f-8f627e375cc7-kube-api-access-jb25t\") pod \"glance-db-create-wdksf\" (UID: \"da31d5ac-6b38-41e4-a35f-8f627e375cc7\") " pod="openstack/glance-db-create-wdksf" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.226044 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qj6gb\" (UniqueName: \"kubernetes.io/projected/f169b2c3-efe1-46c6-bfdb-7a30052dbc20-kube-api-access-qj6gb\") pod \"glance-651a-account-create-update-v45dv\" (UID: \"f169b2c3-efe1-46c6-bfdb-7a30052dbc20\") " pod="openstack/glance-651a-account-create-update-v45dv" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.226718 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/da31d5ac-6b38-41e4-a35f-8f627e375cc7-operator-scripts\") pod \"glance-db-create-wdksf\" (UID: \"da31d5ac-6b38-41e4-a35f-8f627e375cc7\") " pod="openstack/glance-db-create-wdksf" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.243214 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jb25t\" (UniqueName: \"kubernetes.io/projected/da31d5ac-6b38-41e4-a35f-8f627e375cc7-kube-api-access-jb25t\") pod \"glance-db-create-wdksf\" (UID: \"da31d5ac-6b38-41e4-a35f-8f627e375cc7\") " pod="openstack/glance-db-create-wdksf" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.328088 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qj6gb\" (UniqueName: \"kubernetes.io/projected/f169b2c3-efe1-46c6-bfdb-7a30052dbc20-kube-api-access-qj6gb\") pod \"glance-651a-account-create-update-v45dv\" (UID: \"f169b2c3-efe1-46c6-bfdb-7a30052dbc20\") " pod="openstack/glance-651a-account-create-update-v45dv" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.328255 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f169b2c3-efe1-46c6-bfdb-7a30052dbc20-operator-scripts\") pod \"glance-651a-account-create-update-v45dv\" (UID: \"f169b2c3-efe1-46c6-bfdb-7a30052dbc20\") " pod="openstack/glance-651a-account-create-update-v45dv" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.329111 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f169b2c3-efe1-46c6-bfdb-7a30052dbc20-operator-scripts\") pod \"glance-651a-account-create-update-v45dv\" (UID: \"f169b2c3-efe1-46c6-bfdb-7a30052dbc20\") " pod="openstack/glance-651a-account-create-update-v45dv" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.342660 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qj6gb\" (UniqueName: \"kubernetes.io/projected/f169b2c3-efe1-46c6-bfdb-7a30052dbc20-kube-api-access-qj6gb\") pod \"glance-651a-account-create-update-v45dv\" (UID: \"f169b2c3-efe1-46c6-bfdb-7a30052dbc20\") " pod="openstack/glance-651a-account-create-update-v45dv" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.364961 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-wdksf" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.462953 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-651a-account-create-update-v45dv" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.745602 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-wdksf"] Dec 13 07:00:54 crc kubenswrapper[4644]: W1213 07:00:54.749158 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda31d5ac_6b38_41e4_a35f_8f627e375cc7.slice/crio-12a13cfacc5dfebe428a9fedfa70191992ba317684a122b467a0ce5c41c7dd62 WatchSource:0}: Error finding container 12a13cfacc5dfebe428a9fedfa70191992ba317684a122b467a0ce5c41c7dd62: Status 404 returned error can't find the container with id 12a13cfacc5dfebe428a9fedfa70191992ba317684a122b467a0ce5c41c7dd62 Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.797467 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7cnxw" event={"ID":"1ec589a2-9516-4432-90ad-9799fee56ee7","Type":"ContainerStarted","Data":"3161dd93de7b911954d72d47344a8f817b5f1c300df6acdb85bb56a73d0fca84"} Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.800071 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-wdksf" event={"ID":"da31d5ac-6b38-41e4-a35f-8f627e375cc7","Type":"ContainerStarted","Data":"12a13cfacc5dfebe428a9fedfa70191992ba317684a122b467a0ce5c41c7dd62"} Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.856114 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7cnxw" podStartSLOduration=2.2061790119999998 podStartE2EDuration="3.856095546s" podCreationTimestamp="2025-12-13 07:00:51 +0000 UTC" firstStartedPulling="2025-12-13 07:00:52.7824379 +0000 UTC m=+914.997388733" lastFinishedPulling="2025-12-13 07:00:54.432354434 +0000 UTC m=+916.647305267" observedRunningTime="2025-12-13 07:00:54.814598702 +0000 UTC m=+917.029549534" watchObservedRunningTime="2025-12-13 07:00:54.856095546 +0000 UTC m=+917.071046379" Dec 13 07:00:54 crc kubenswrapper[4644]: I1213 07:00:54.857532 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-651a-account-create-update-v45dv"] Dec 13 07:00:54 crc kubenswrapper[4644]: W1213 07:00:54.860872 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf169b2c3_efe1_46c6_bfdb_7a30052dbc20.slice/crio-1232a6dc24c50b2792691e196d350297d1016d3a1795865a012aa47dd7522248 WatchSource:0}: Error finding container 1232a6dc24c50b2792691e196d350297d1016d3a1795865a012aa47dd7522248: Status 404 returned error can't find the container with id 1232a6dc24c50b2792691e196d350297d1016d3a1795865a012aa47dd7522248 Dec 13 07:00:55 crc kubenswrapper[4644]: I1213 07:00:55.807289 4644 generic.go:334] "Generic (PLEG): container finished" podID="da31d5ac-6b38-41e4-a35f-8f627e375cc7" containerID="2e17f64ff924b1cd080c58ecb9b5c4d9c839e0fe4b253ca92b041ed4d718f21c" exitCode=0 Dec 13 07:00:55 crc kubenswrapper[4644]: I1213 07:00:55.807329 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-wdksf" event={"ID":"da31d5ac-6b38-41e4-a35f-8f627e375cc7","Type":"ContainerDied","Data":"2e17f64ff924b1cd080c58ecb9b5c4d9c839e0fe4b253ca92b041ed4d718f21c"} Dec 13 07:00:55 crc kubenswrapper[4644]: I1213 07:00:55.809004 4644 generic.go:334] "Generic (PLEG): container finished" podID="f169b2c3-efe1-46c6-bfdb-7a30052dbc20" 
containerID="eed90f160c8516ddb15f1542358bc940498ed2839865871c1607d14d97007cce" exitCode=0 Dec 13 07:00:55 crc kubenswrapper[4644]: I1213 07:00:55.809048 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-651a-account-create-update-v45dv" event={"ID":"f169b2c3-efe1-46c6-bfdb-7a30052dbc20","Type":"ContainerDied","Data":"eed90f160c8516ddb15f1542358bc940498ed2839865871c1607d14d97007cce"} Dec 13 07:00:55 crc kubenswrapper[4644]: I1213 07:00:55.809071 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-651a-account-create-update-v45dv" event={"ID":"f169b2c3-efe1-46c6-bfdb-7a30052dbc20","Type":"ContainerStarted","Data":"1232a6dc24c50b2792691e196d350297d1016d3a1795865a012aa47dd7522248"} Dec 13 07:00:56 crc kubenswrapper[4644]: I1213 07:00:56.272731 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 13 07:00:56 crc kubenswrapper[4644]: I1213 07:00:56.779435 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jd5r8"] Dec 13 07:00:56 crc kubenswrapper[4644]: I1213 07:00:56.781013 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jd5r8" Dec 13 07:00:56 crc kubenswrapper[4644]: I1213 07:00:56.787032 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jd5r8"] Dec 13 07:00:56 crc kubenswrapper[4644]: I1213 07:00:56.866854 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5fwv\" (UniqueName: \"kubernetes.io/projected/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-kube-api-access-m5fwv\") pod \"redhat-operators-jd5r8\" (UID: \"69240bb4-8ef9-4859-a1f8-c112b1c39e3d\") " pod="openshift-marketplace/redhat-operators-jd5r8" Dec 13 07:00:56 crc kubenswrapper[4644]: I1213 07:00:56.866906 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-catalog-content\") pod \"redhat-operators-jd5r8\" (UID: \"69240bb4-8ef9-4859-a1f8-c112b1c39e3d\") " pod="openshift-marketplace/redhat-operators-jd5r8" Dec 13 07:00:56 crc kubenswrapper[4644]: I1213 07:00:56.866946 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-utilities\") pod \"redhat-operators-jd5r8\" (UID: \"69240bb4-8ef9-4859-a1f8-c112b1c39e3d\") " pod="openshift-marketplace/redhat-operators-jd5r8" Dec 13 07:00:56 crc kubenswrapper[4644]: I1213 07:00:56.968898 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5fwv\" (UniqueName: \"kubernetes.io/projected/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-kube-api-access-m5fwv\") pod \"redhat-operators-jd5r8\" (UID: \"69240bb4-8ef9-4859-a1f8-c112b1c39e3d\") " pod="openshift-marketplace/redhat-operators-jd5r8" Dec 13 07:00:56 crc kubenswrapper[4644]: I1213 07:00:56.968946 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-catalog-content\") pod \"redhat-operators-jd5r8\" (UID: \"69240bb4-8ef9-4859-a1f8-c112b1c39e3d\") " pod="openshift-marketplace/redhat-operators-jd5r8" Dec 13 07:00:56 crc kubenswrapper[4644]: I1213 07:00:56.968982 4644 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-utilities\") pod \"redhat-operators-jd5r8\" (UID: \"69240bb4-8ef9-4859-a1f8-c112b1c39e3d\") " pod="openshift-marketplace/redhat-operators-jd5r8" Dec 13 07:00:56 crc kubenswrapper[4644]: I1213 07:00:56.969507 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-utilities\") pod \"redhat-operators-jd5r8\" (UID: \"69240bb4-8ef9-4859-a1f8-c112b1c39e3d\") " pod="openshift-marketplace/redhat-operators-jd5r8" Dec 13 07:00:56 crc kubenswrapper[4644]: I1213 07:00:56.970324 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-catalog-content\") pod \"redhat-operators-jd5r8\" (UID: \"69240bb4-8ef9-4859-a1f8-c112b1c39e3d\") " pod="openshift-marketplace/redhat-operators-jd5r8" Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.007263 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5fwv\" (UniqueName: \"kubernetes.io/projected/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-kube-api-access-m5fwv\") pod \"redhat-operators-jd5r8\" (UID: \"69240bb4-8ef9-4859-a1f8-c112b1c39e3d\") " pod="openshift-marketplace/redhat-operators-jd5r8" Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.095104 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jd5r8" Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.197965 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-wdksf" Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.211573 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-651a-account-create-update-v45dv" Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.273112 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da31d5ac-6b38-41e4-a35f-8f627e375cc7-operator-scripts\") pod \"da31d5ac-6b38-41e4-a35f-8f627e375cc7\" (UID: \"da31d5ac-6b38-41e4-a35f-8f627e375cc7\") " Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.273555 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jb25t\" (UniqueName: \"kubernetes.io/projected/da31d5ac-6b38-41e4-a35f-8f627e375cc7-kube-api-access-jb25t\") pod \"da31d5ac-6b38-41e4-a35f-8f627e375cc7\" (UID: \"da31d5ac-6b38-41e4-a35f-8f627e375cc7\") " Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.274773 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da31d5ac-6b38-41e4-a35f-8f627e375cc7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "da31d5ac-6b38-41e4-a35f-8f627e375cc7" (UID: "da31d5ac-6b38-41e4-a35f-8f627e375cc7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.289466 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da31d5ac-6b38-41e4-a35f-8f627e375cc7-kube-api-access-jb25t" (OuterVolumeSpecName: "kube-api-access-jb25t") pod "da31d5ac-6b38-41e4-a35f-8f627e375cc7" (UID: "da31d5ac-6b38-41e4-a35f-8f627e375cc7"). 
InnerVolumeSpecName "kube-api-access-jb25t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.375733 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qj6gb\" (UniqueName: \"kubernetes.io/projected/f169b2c3-efe1-46c6-bfdb-7a30052dbc20-kube-api-access-qj6gb\") pod \"f169b2c3-efe1-46c6-bfdb-7a30052dbc20\" (UID: \"f169b2c3-efe1-46c6-bfdb-7a30052dbc20\") " Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.375985 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f169b2c3-efe1-46c6-bfdb-7a30052dbc20-operator-scripts\") pod \"f169b2c3-efe1-46c6-bfdb-7a30052dbc20\" (UID: \"f169b2c3-efe1-46c6-bfdb-7a30052dbc20\") " Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.376367 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jb25t\" (UniqueName: \"kubernetes.io/projected/da31d5ac-6b38-41e4-a35f-8f627e375cc7-kube-api-access-jb25t\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.376380 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da31d5ac-6b38-41e4-a35f-8f627e375cc7-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.376779 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f169b2c3-efe1-46c6-bfdb-7a30052dbc20-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f169b2c3-efe1-46c6-bfdb-7a30052dbc20" (UID: "f169b2c3-efe1-46c6-bfdb-7a30052dbc20"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.379666 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f169b2c3-efe1-46c6-bfdb-7a30052dbc20-kube-api-access-qj6gb" (OuterVolumeSpecName: "kube-api-access-qj6gb") pod "f169b2c3-efe1-46c6-bfdb-7a30052dbc20" (UID: "f169b2c3-efe1-46c6-bfdb-7a30052dbc20"). InnerVolumeSpecName "kube-api-access-qj6gb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.478609 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f169b2c3-efe1-46c6-bfdb-7a30052dbc20-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.478639 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qj6gb\" (UniqueName: \"kubernetes.io/projected/f169b2c3-efe1-46c6-bfdb-7a30052dbc20-kube-api-access-qj6gb\") on node \"crc\" DevicePath \"\"" Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.522145 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jd5r8"] Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.822088 4644 generic.go:334] "Generic (PLEG): container finished" podID="69240bb4-8ef9-4859-a1f8-c112b1c39e3d" containerID="365681d0fd52e4c0750d3e83f9e68d1d98b4135a01b6f55a0c202424ca09415a" exitCode=0 Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.822170 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jd5r8" event={"ID":"69240bb4-8ef9-4859-a1f8-c112b1c39e3d","Type":"ContainerDied","Data":"365681d0fd52e4c0750d3e83f9e68d1d98b4135a01b6f55a0c202424ca09415a"} Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.822354 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jd5r8" event={"ID":"69240bb4-8ef9-4859-a1f8-c112b1c39e3d","Type":"ContainerStarted","Data":"cb364542f980a49f554ee159547e9455bca6b07ee60ea77e5104a0bf2c6089b5"} Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.825150 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-651a-account-create-update-v45dv" event={"ID":"f169b2c3-efe1-46c6-bfdb-7a30052dbc20","Type":"ContainerDied","Data":"1232a6dc24c50b2792691e196d350297d1016d3a1795865a012aa47dd7522248"} Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.825185 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1232a6dc24c50b2792691e196d350297d1016d3a1795865a012aa47dd7522248" Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.825226 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-651a-account-create-update-v45dv" Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.826675 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-wdksf" event={"ID":"da31d5ac-6b38-41e4-a35f-8f627e375cc7","Type":"ContainerDied","Data":"12a13cfacc5dfebe428a9fedfa70191992ba317684a122b467a0ce5c41c7dd62"} Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.826715 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12a13cfacc5dfebe428a9fedfa70191992ba317684a122b467a0ce5c41c7dd62" Dec 13 07:00:57 crc kubenswrapper[4644]: I1213 07:00:57.826752 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-wdksf" Dec 13 07:00:57 crc kubenswrapper[4644]: E1213 07:00:57.875821 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47f5ebb6_8937_441f_9c9f_a1e31c401eed.slice/crio-b8fc48432f72b692739947981613bbb69944b32e6c437e9a01d15154f417b1b5\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47f5ebb6_8937_441f_9c9f_a1e31c401eed.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda31d5ac_6b38_41e4_a35f_8f627e375cc7.slice\": RecentStats: unable to find data in memory cache]" Dec 13 07:00:58 crc kubenswrapper[4644]: I1213 07:00:58.834967 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jd5r8" event={"ID":"69240bb4-8ef9-4859-a1f8-c112b1c39e3d","Type":"ContainerStarted","Data":"b26c4260c06fda49a36b107d6c8077981185d2e91ad8a9999be21d41f4cfccfe"} Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.321494 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-lc7dc"] Dec 13 07:00:59 crc kubenswrapper[4644]: E1213 07:00:59.321862 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f169b2c3-efe1-46c6-bfdb-7a30052dbc20" containerName="mariadb-account-create-update" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.321877 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f169b2c3-efe1-46c6-bfdb-7a30052dbc20" containerName="mariadb-account-create-update" Dec 13 07:00:59 crc kubenswrapper[4644]: E1213 07:00:59.321901 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da31d5ac-6b38-41e4-a35f-8f627e375cc7" containerName="mariadb-database-create" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.321908 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="da31d5ac-6b38-41e4-a35f-8f627e375cc7" containerName="mariadb-database-create" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.322040 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="da31d5ac-6b38-41e4-a35f-8f627e375cc7" containerName="mariadb-database-create" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.322048 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f169b2c3-efe1-46c6-bfdb-7a30052dbc20" containerName="mariadb-account-create-update" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.322584 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-lc7dc" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.324151 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.324344 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-58bbs" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.333846 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-lc7dc"] Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.410967 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-db-sync-config-data\") pod \"glance-db-sync-lc7dc\" (UID: \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\") " pod="openstack/glance-db-sync-lc7dc" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.411075 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-config-data\") pod \"glance-db-sync-lc7dc\" (UID: \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\") " pod="openstack/glance-db-sync-lc7dc" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.411189 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wgk5\" (UniqueName: \"kubernetes.io/projected/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-kube-api-access-6wgk5\") pod \"glance-db-sync-lc7dc\" (UID: \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\") " pod="openstack/glance-db-sync-lc7dc" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.411325 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-combined-ca-bundle\") pod \"glance-db-sync-lc7dc\" (UID: \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\") " pod="openstack/glance-db-sync-lc7dc" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.512536 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wgk5\" (UniqueName: \"kubernetes.io/projected/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-kube-api-access-6wgk5\") pod \"glance-db-sync-lc7dc\" (UID: \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\") " pod="openstack/glance-db-sync-lc7dc" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.512637 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-combined-ca-bundle\") pod \"glance-db-sync-lc7dc\" (UID: \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\") " pod="openstack/glance-db-sync-lc7dc" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.513327 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-db-sync-config-data\") pod \"glance-db-sync-lc7dc\" (UID: \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\") " pod="openstack/glance-db-sync-lc7dc" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.513839 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-config-data\") pod 
\"glance-db-sync-lc7dc\" (UID: \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\") " pod="openstack/glance-db-sync-lc7dc" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.518775 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-db-sync-config-data\") pod \"glance-db-sync-lc7dc\" (UID: \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\") " pod="openstack/glance-db-sync-lc7dc" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.518868 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-config-data\") pod \"glance-db-sync-lc7dc\" (UID: \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\") " pod="openstack/glance-db-sync-lc7dc" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.521339 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-combined-ca-bundle\") pod \"glance-db-sync-lc7dc\" (UID: \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\") " pod="openstack/glance-db-sync-lc7dc" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.527085 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wgk5\" (UniqueName: \"kubernetes.io/projected/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-kube-api-access-6wgk5\") pod \"glance-db-sync-lc7dc\" (UID: \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\") " pod="openstack/glance-db-sync-lc7dc" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.637394 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-lc7dc" Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.845247 4644 generic.go:334] "Generic (PLEG): container finished" podID="69240bb4-8ef9-4859-a1f8-c112b1c39e3d" containerID="b26c4260c06fda49a36b107d6c8077981185d2e91ad8a9999be21d41f4cfccfe" exitCode=0 Dec 13 07:00:59 crc kubenswrapper[4644]: I1213 07:00:59.845290 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jd5r8" event={"ID":"69240bb4-8ef9-4859-a1f8-c112b1c39e3d","Type":"ContainerDied","Data":"b26c4260c06fda49a36b107d6c8077981185d2e91ad8a9999be21d41f4cfccfe"} Dec 13 07:01:00 crc kubenswrapper[4644]: I1213 07:01:00.087887 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-lc7dc"] Dec 13 07:01:00 crc kubenswrapper[4644]: I1213 07:01:00.852203 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-lc7dc" event={"ID":"b1204c9c-4441-44c3-8c02-cbf9c3c74a88","Type":"ContainerStarted","Data":"c773e41efedf1367a309eac0dac0c269634945560d41308919ab3397a617516d"} Dec 13 07:01:00 crc kubenswrapper[4644]: I1213 07:01:00.854952 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jd5r8" event={"ID":"69240bb4-8ef9-4859-a1f8-c112b1c39e3d","Type":"ContainerStarted","Data":"c2c3d97ab5705d0441e777d8a657a0cca4aed68c2e3d8fa324122ad10717f7b2"} Dec 13 07:01:00 crc kubenswrapper[4644]: I1213 07:01:00.880226 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jd5r8" podStartSLOduration=2.278350147 podStartE2EDuration="4.880208846s" podCreationTimestamp="2025-12-13 07:00:56 +0000 UTC" firstStartedPulling="2025-12-13 07:00:57.824039257 +0000 UTC m=+920.038990090" lastFinishedPulling="2025-12-13 
07:01:00.425897956 +0000 UTC m=+922.640848789" observedRunningTime="2025-12-13 07:01:00.872990945 +0000 UTC m=+923.087941777" watchObservedRunningTime="2025-12-13 07:01:00.880208846 +0000 UTC m=+923.095159679" Dec 13 07:01:01 crc kubenswrapper[4644]: I1213 07:01:01.916908 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7cnxw" Dec 13 07:01:01 crc kubenswrapper[4644]: I1213 07:01:01.917922 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7cnxw" Dec 13 07:01:01 crc kubenswrapper[4644]: I1213 07:01:01.960467 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7cnxw" Dec 13 07:01:02 crc kubenswrapper[4644]: I1213 07:01:02.904210 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7cnxw" Dec 13 07:01:03 crc kubenswrapper[4644]: I1213 07:01:03.180865 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7cnxw"] Dec 13 07:01:04 crc kubenswrapper[4644]: I1213 07:01:04.881980 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7cnxw" podUID="1ec589a2-9516-4432-90ad-9799fee56ee7" containerName="registry-server" containerID="cri-o://3161dd93de7b911954d72d47344a8f817b5f1c300df6acdb85bb56a73d0fca84" gracePeriod=2 Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.284755 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7cnxw" Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.412222 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ec589a2-9516-4432-90ad-9799fee56ee7-utilities\") pod \"1ec589a2-9516-4432-90ad-9799fee56ee7\" (UID: \"1ec589a2-9516-4432-90ad-9799fee56ee7\") " Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.412610 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2c8z2\" (UniqueName: \"kubernetes.io/projected/1ec589a2-9516-4432-90ad-9799fee56ee7-kube-api-access-2c8z2\") pod \"1ec589a2-9516-4432-90ad-9799fee56ee7\" (UID: \"1ec589a2-9516-4432-90ad-9799fee56ee7\") " Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.412813 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ec589a2-9516-4432-90ad-9799fee56ee7-catalog-content\") pod \"1ec589a2-9516-4432-90ad-9799fee56ee7\" (UID: \"1ec589a2-9516-4432-90ad-9799fee56ee7\") " Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.413128 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ec589a2-9516-4432-90ad-9799fee56ee7-utilities" (OuterVolumeSpecName: "utilities") pod "1ec589a2-9516-4432-90ad-9799fee56ee7" (UID: "1ec589a2-9516-4432-90ad-9799fee56ee7"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.413498 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ec589a2-9516-4432-90ad-9799fee56ee7-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.424378 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ec589a2-9516-4432-90ad-9799fee56ee7-kube-api-access-2c8z2" (OuterVolumeSpecName: "kube-api-access-2c8z2") pod "1ec589a2-9516-4432-90ad-9799fee56ee7" (UID: "1ec589a2-9516-4432-90ad-9799fee56ee7"). InnerVolumeSpecName "kube-api-access-2c8z2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.427041 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ec589a2-9516-4432-90ad-9799fee56ee7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1ec589a2-9516-4432-90ad-9799fee56ee7" (UID: "1ec589a2-9516-4432-90ad-9799fee56ee7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.515187 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ec589a2-9516-4432-90ad-9799fee56ee7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.515219 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2c8z2\" (UniqueName: \"kubernetes.io/projected/1ec589a2-9516-4432-90ad-9799fee56ee7-kube-api-access-2c8z2\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.903148 4644 generic.go:334] "Generic (PLEG): container finished" podID="1ec589a2-9516-4432-90ad-9799fee56ee7" containerID="3161dd93de7b911954d72d47344a8f817b5f1c300df6acdb85bb56a73d0fca84" exitCode=0 Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.903231 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7cnxw" event={"ID":"1ec589a2-9516-4432-90ad-9799fee56ee7","Type":"ContainerDied","Data":"3161dd93de7b911954d72d47344a8f817b5f1c300df6acdb85bb56a73d0fca84"} Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.903285 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7cnxw" event={"ID":"1ec589a2-9516-4432-90ad-9799fee56ee7","Type":"ContainerDied","Data":"90dc3fbe1cc568f1e1d941b2eb33ae56575106bde1f3e09b82382146ff588cfb"} Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.903320 4644 scope.go:117] "RemoveContainer" containerID="3161dd93de7b911954d72d47344a8f817b5f1c300df6acdb85bb56a73d0fca84" Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.903891 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7cnxw" Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.929227 4644 scope.go:117] "RemoveContainer" containerID="3596decf08282b8bd73ca69c377029a88e876f7a40fb2b301c61b83601d2f5bd" Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.936305 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7cnxw"] Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.942033 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7cnxw"] Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.959897 4644 scope.go:117] "RemoveContainer" containerID="fe343f3206e9b2c84e454f9ca600fb8a25cea61b10642a9987be86afb4a4f44e" Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.978090 4644 scope.go:117] "RemoveContainer" containerID="3161dd93de7b911954d72d47344a8f817b5f1c300df6acdb85bb56a73d0fca84" Dec 13 07:01:05 crc kubenswrapper[4644]: E1213 07:01:05.978557 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3161dd93de7b911954d72d47344a8f817b5f1c300df6acdb85bb56a73d0fca84\": container with ID starting with 3161dd93de7b911954d72d47344a8f817b5f1c300df6acdb85bb56a73d0fca84 not found: ID does not exist" containerID="3161dd93de7b911954d72d47344a8f817b5f1c300df6acdb85bb56a73d0fca84" Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.978597 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3161dd93de7b911954d72d47344a8f817b5f1c300df6acdb85bb56a73d0fca84"} err="failed to get container status \"3161dd93de7b911954d72d47344a8f817b5f1c300df6acdb85bb56a73d0fca84\": rpc error: code = NotFound desc = could not find container \"3161dd93de7b911954d72d47344a8f817b5f1c300df6acdb85bb56a73d0fca84\": container with ID starting with 3161dd93de7b911954d72d47344a8f817b5f1c300df6acdb85bb56a73d0fca84 not found: ID does not exist" Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.978642 4644 scope.go:117] "RemoveContainer" containerID="3596decf08282b8bd73ca69c377029a88e876f7a40fb2b301c61b83601d2f5bd" Dec 13 07:01:05 crc kubenswrapper[4644]: E1213 07:01:05.979048 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3596decf08282b8bd73ca69c377029a88e876f7a40fb2b301c61b83601d2f5bd\": container with ID starting with 3596decf08282b8bd73ca69c377029a88e876f7a40fb2b301c61b83601d2f5bd not found: ID does not exist" containerID="3596decf08282b8bd73ca69c377029a88e876f7a40fb2b301c61b83601d2f5bd" Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.979071 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3596decf08282b8bd73ca69c377029a88e876f7a40fb2b301c61b83601d2f5bd"} err="failed to get container status \"3596decf08282b8bd73ca69c377029a88e876f7a40fb2b301c61b83601d2f5bd\": rpc error: code = NotFound desc = could not find container \"3596decf08282b8bd73ca69c377029a88e876f7a40fb2b301c61b83601d2f5bd\": container with ID starting with 3596decf08282b8bd73ca69c377029a88e876f7a40fb2b301c61b83601d2f5bd not found: ID does not exist" Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.979084 4644 scope.go:117] "RemoveContainer" containerID="fe343f3206e9b2c84e454f9ca600fb8a25cea61b10642a9987be86afb4a4f44e" Dec 13 07:01:05 crc kubenswrapper[4644]: E1213 07:01:05.979380 4644 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"fe343f3206e9b2c84e454f9ca600fb8a25cea61b10642a9987be86afb4a4f44e\": container with ID starting with fe343f3206e9b2c84e454f9ca600fb8a25cea61b10642a9987be86afb4a4f44e not found: ID does not exist" containerID="fe343f3206e9b2c84e454f9ca600fb8a25cea61b10642a9987be86afb4a4f44e" Dec 13 07:01:05 crc kubenswrapper[4644]: I1213 07:01:05.979414 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe343f3206e9b2c84e454f9ca600fb8a25cea61b10642a9987be86afb4a4f44e"} err="failed to get container status \"fe343f3206e9b2c84e454f9ca600fb8a25cea61b10642a9987be86afb4a4f44e\": rpc error: code = NotFound desc = could not find container \"fe343f3206e9b2c84e454f9ca600fb8a25cea61b10642a9987be86afb4a4f44e\": container with ID starting with fe343f3206e9b2c84e454f9ca600fb8a25cea61b10642a9987be86afb4a4f44e not found: ID does not exist" Dec 13 07:01:06 crc kubenswrapper[4644]: I1213 07:01:06.398182 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ec589a2-9516-4432-90ad-9799fee56ee7" path="/var/lib/kubelet/pods/1ec589a2-9516-4432-90ad-9799fee56ee7/volumes" Dec 13 07:01:07 crc kubenswrapper[4644]: I1213 07:01:07.095690 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jd5r8" Dec 13 07:01:07 crc kubenswrapper[4644]: I1213 07:01:07.095746 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jd5r8" Dec 13 07:01:07 crc kubenswrapper[4644]: I1213 07:01:07.128802 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jd5r8" Dec 13 07:01:07 crc kubenswrapper[4644]: I1213 07:01:07.923329 4644 generic.go:334] "Generic (PLEG): container finished" podID="0dd20500-a2dd-4608-a3c8-7d714ffb09c4" containerID="7fb838eb12d8d62aadffa1f046880d9513f421e1f29a5d58016251c007b5e95d" exitCode=0 Dec 13 07:01:07 crc kubenswrapper[4644]: I1213 07:01:07.923431 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0dd20500-a2dd-4608-a3c8-7d714ffb09c4","Type":"ContainerDied","Data":"7fb838eb12d8d62aadffa1f046880d9513f421e1f29a5d58016251c007b5e95d"} Dec 13 07:01:07 crc kubenswrapper[4644]: I1213 07:01:07.926235 4644 generic.go:334] "Generic (PLEG): container finished" podID="706c4700-9d13-4bac-b7ac-0c19c09cc7e7" containerID="70a5774436468fb133077a60d251233d249cc290e92dd10a84f76c87a4876089" exitCode=0 Dec 13 07:01:07 crc kubenswrapper[4644]: I1213 07:01:07.926293 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"706c4700-9d13-4bac-b7ac-0c19c09cc7e7","Type":"ContainerDied","Data":"70a5774436468fb133077a60d251233d249cc290e92dd10a84f76c87a4876089"} Dec 13 07:01:07 crc kubenswrapper[4644]: I1213 07:01:07.974047 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jd5r8" Dec 13 07:01:08 crc kubenswrapper[4644]: E1213 07:01:08.066178 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47f5ebb6_8937_441f_9c9f_a1e31c401eed.slice\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47f5ebb6_8937_441f_9c9f_a1e31c401eed.slice/crio-b8fc48432f72b692739947981613bbb69944b32e6c437e9a01d15154f417b1b5\": RecentStats: unable to find data in memory cache]" Dec 13 07:01:08 crc kubenswrapper[4644]: I1213 07:01:08.576245 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jd5r8"] Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.176667 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.179407 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-cb7bh" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.182972 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-txrxh" podUID="6e2e749f-da1d-4692-9479-275cd00bc4b0" containerName="ovn-controller" probeResult="failure" output=< Dec 13 07:01:09 crc kubenswrapper[4644]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 13 07:01:09 crc kubenswrapper[4644]: > Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.385069 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-txrxh-config-2n8t7"] Dec 13 07:01:09 crc kubenswrapper[4644]: E1213 07:01:09.385971 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ec589a2-9516-4432-90ad-9799fee56ee7" containerName="extract-content" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.386078 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ec589a2-9516-4432-90ad-9799fee56ee7" containerName="extract-content" Dec 13 07:01:09 crc kubenswrapper[4644]: E1213 07:01:09.386164 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ec589a2-9516-4432-90ad-9799fee56ee7" containerName="registry-server" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.386215 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ec589a2-9516-4432-90ad-9799fee56ee7" containerName="registry-server" Dec 13 07:01:09 crc kubenswrapper[4644]: E1213 07:01:09.386285 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ec589a2-9516-4432-90ad-9799fee56ee7" containerName="extract-utilities" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.386332 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ec589a2-9516-4432-90ad-9799fee56ee7" containerName="extract-utilities" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.386587 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ec589a2-9516-4432-90ad-9799fee56ee7" containerName="registry-server" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.387232 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.389818 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.398160 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-txrxh-config-2n8t7"] Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.495797 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-run-ovn\") pod \"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.495889 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54a575e1-c07b-4928-ab41-90deba7ddaad-scripts\") pod \"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.495911 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lr7fw\" (UniqueName: \"kubernetes.io/projected/54a575e1-c07b-4928-ab41-90deba7ddaad-kube-api-access-lr7fw\") pod \"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.495996 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-log-ovn\") pod \"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.496066 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/54a575e1-c07b-4928-ab41-90deba7ddaad-additional-scripts\") pod \"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.496139 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-run\") pod \"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.598389 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-run-ovn\") pod \"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.598525 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54a575e1-c07b-4928-ab41-90deba7ddaad-scripts\") pod 
\"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.598554 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lr7fw\" (UniqueName: \"kubernetes.io/projected/54a575e1-c07b-4928-ab41-90deba7ddaad-kube-api-access-lr7fw\") pod \"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.598605 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-log-ovn\") pod \"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.598668 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/54a575e1-c07b-4928-ab41-90deba7ddaad-additional-scripts\") pod \"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.598681 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-run-ovn\") pod \"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.598756 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-log-ovn\") pod \"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.598777 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-run\") pod \"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.598857 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-run\") pod \"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.599581 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/54a575e1-c07b-4928-ab41-90deba7ddaad-additional-scripts\") pod \"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.610736 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54a575e1-c07b-4928-ab41-90deba7ddaad-scripts\") pod 
\"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.616660 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lr7fw\" (UniqueName: \"kubernetes.io/projected/54a575e1-c07b-4928-ab41-90deba7ddaad-kube-api-access-lr7fw\") pod \"ovn-controller-txrxh-config-2n8t7\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.722066 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.753515 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.753618 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.753663 4644 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.754960 4644 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5e6f928015ce5189200b70ce1217403ef17ccffc81e2b7876249680646ea25ba"} pod="openshift-machine-config-operator/machine-config-daemon-45tj4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.755014 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" containerID="cri-o://5e6f928015ce5189200b70ce1217403ef17ccffc81e2b7876249680646ea25ba" gracePeriod=600 Dec 13 07:01:09 crc kubenswrapper[4644]: I1213 07:01:09.939648 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jd5r8" podUID="69240bb4-8ef9-4859-a1f8-c112b1c39e3d" containerName="registry-server" containerID="cri-o://c2c3d97ab5705d0441e777d8a657a0cca4aed68c2e3d8fa324122ad10717f7b2" gracePeriod=2 Dec 13 07:01:10 crc kubenswrapper[4644]: I1213 07:01:10.951754 4644 generic.go:334] "Generic (PLEG): container finished" podID="48240f19-087e-4597-b448-ab1a190a5027" containerID="5e6f928015ce5189200b70ce1217403ef17ccffc81e2b7876249680646ea25ba" exitCode=0 Dec 13 07:01:10 crc kubenswrapper[4644]: I1213 07:01:10.951831 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerDied","Data":"5e6f928015ce5189200b70ce1217403ef17ccffc81e2b7876249680646ea25ba"} Dec 13 07:01:10 crc kubenswrapper[4644]: I1213 07:01:10.952161 4644 scope.go:117] 
"RemoveContainer" containerID="2487f8fbc172ccc82773d3da9a7aed4fb2a0c9cb73ab10d78d14719b9fd79f00" Dec 13 07:01:10 crc kubenswrapper[4644]: I1213 07:01:10.960052 4644 generic.go:334] "Generic (PLEG): container finished" podID="69240bb4-8ef9-4859-a1f8-c112b1c39e3d" containerID="c2c3d97ab5705d0441e777d8a657a0cca4aed68c2e3d8fa324122ad10717f7b2" exitCode=0 Dec 13 07:01:10 crc kubenswrapper[4644]: I1213 07:01:10.960096 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jd5r8" event={"ID":"69240bb4-8ef9-4859-a1f8-c112b1c39e3d","Type":"ContainerDied","Data":"c2c3d97ab5705d0441e777d8a657a0cca4aed68c2e3d8fa324122ad10717f7b2"} Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.707071 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jd5r8" Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.850688 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-txrxh-config-2n8t7"] Dec 13 07:01:12 crc kubenswrapper[4644]: W1213 07:01:12.851480 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod54a575e1_c07b_4928_ab41_90deba7ddaad.slice/crio-b903409bc7a506c57d8fc1a615ac1d84ef6f572bc18574226c2f6cbd32970290 WatchSource:0}: Error finding container b903409bc7a506c57d8fc1a615ac1d84ef6f572bc18574226c2f6cbd32970290: Status 404 returned error can't find the container with id b903409bc7a506c57d8fc1a615ac1d84ef6f572bc18574226c2f6cbd32970290 Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.860819 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5fwv\" (UniqueName: \"kubernetes.io/projected/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-kube-api-access-m5fwv\") pod \"69240bb4-8ef9-4859-a1f8-c112b1c39e3d\" (UID: \"69240bb4-8ef9-4859-a1f8-c112b1c39e3d\") " Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.860957 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-utilities\") pod \"69240bb4-8ef9-4859-a1f8-c112b1c39e3d\" (UID: \"69240bb4-8ef9-4859-a1f8-c112b1c39e3d\") " Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.861065 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-catalog-content\") pod \"69240bb4-8ef9-4859-a1f8-c112b1c39e3d\" (UID: \"69240bb4-8ef9-4859-a1f8-c112b1c39e3d\") " Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.862065 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-utilities" (OuterVolumeSpecName: "utilities") pod "69240bb4-8ef9-4859-a1f8-c112b1c39e3d" (UID: "69240bb4-8ef9-4859-a1f8-c112b1c39e3d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.869484 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-kube-api-access-m5fwv" (OuterVolumeSpecName: "kube-api-access-m5fwv") pod "69240bb4-8ef9-4859-a1f8-c112b1c39e3d" (UID: "69240bb4-8ef9-4859-a1f8-c112b1c39e3d"). InnerVolumeSpecName "kube-api-access-m5fwv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.948681 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "69240bb4-8ef9-4859-a1f8-c112b1c39e3d" (UID: "69240bb4-8ef9-4859-a1f8-c112b1c39e3d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.963350 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5fwv\" (UniqueName: \"kubernetes.io/projected/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-kube-api-access-m5fwv\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.963392 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.963403 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69240bb4-8ef9-4859-a1f8-c112b1c39e3d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.976941 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jd5r8" event={"ID":"69240bb4-8ef9-4859-a1f8-c112b1c39e3d","Type":"ContainerDied","Data":"cb364542f980a49f554ee159547e9455bca6b07ee60ea77e5104a0bf2c6089b5"} Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.976952 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jd5r8" Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.977006 4644 scope.go:117] "RemoveContainer" containerID="c2c3d97ab5705d0441e777d8a657a0cca4aed68c2e3d8fa324122ad10717f7b2" Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.980673 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0dd20500-a2dd-4608-a3c8-7d714ffb09c4","Type":"ContainerStarted","Data":"0c64eda3372ed53fa4829f56319a85976b6aeb35dd760f5102f3c2ed0ad0c74b"} Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.981011 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.983524 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-txrxh-config-2n8t7" event={"ID":"54a575e1-c07b-4928-ab41-90deba7ddaad","Type":"ContainerStarted","Data":"b903409bc7a506c57d8fc1a615ac1d84ef6f572bc18574226c2f6cbd32970290"} Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.985301 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"706c4700-9d13-4bac-b7ac-0c19c09cc7e7","Type":"ContainerStarted","Data":"c99ebc148d695f48ca828e7137824a804d83c640f5fd71ecf18ddcbe3253fa5a"} Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.986252 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.988805 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-lc7dc" 
event={"ID":"b1204c9c-4441-44c3-8c02-cbf9c3c74a88","Type":"ContainerStarted","Data":"22f93573de6ca094d257b6b1bb3762bc574821fb9d5c740d032ced1b46b72942"} Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.991907 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerStarted","Data":"e41b9eff015e4f806befa3edcbb8d5a320f89b209722141f8d73fdf31b010f61"} Dec 13 07:01:12 crc kubenswrapper[4644]: I1213 07:01:12.997385 4644 scope.go:117] "RemoveContainer" containerID="b26c4260c06fda49a36b107d6c8077981185d2e91ad8a9999be21d41f4cfccfe" Dec 13 07:01:13 crc kubenswrapper[4644]: I1213 07:01:13.008147 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=51.158892585 podStartE2EDuration="59.008131246s" podCreationTimestamp="2025-12-13 07:00:14 +0000 UTC" firstStartedPulling="2025-12-13 07:00:25.486519577 +0000 UTC m=+887.701470410" lastFinishedPulling="2025-12-13 07:00:33.335758239 +0000 UTC m=+895.550709071" observedRunningTime="2025-12-13 07:01:13.002432911 +0000 UTC m=+935.217383744" watchObservedRunningTime="2025-12-13 07:01:13.008131246 +0000 UTC m=+935.223082079" Dec 13 07:01:13 crc kubenswrapper[4644]: I1213 07:01:13.034946 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=50.063391922 podStartE2EDuration="59.034930715s" podCreationTimestamp="2025-12-13 07:00:14 +0000 UTC" firstStartedPulling="2025-12-13 07:00:25.461951462 +0000 UTC m=+887.676902295" lastFinishedPulling="2025-12-13 07:00:34.433490245 +0000 UTC m=+896.648441088" observedRunningTime="2025-12-13 07:01:13.028346816 +0000 UTC m=+935.243297650" watchObservedRunningTime="2025-12-13 07:01:13.034930715 +0000 UTC m=+935.249881549" Dec 13 07:01:13 crc kubenswrapper[4644]: I1213 07:01:13.037057 4644 scope.go:117] "RemoveContainer" containerID="365681d0fd52e4c0750d3e83f9e68d1d98b4135a01b6f55a0c202424ca09415a" Dec 13 07:01:13 crc kubenswrapper[4644]: I1213 07:01:13.046585 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-lc7dc" podStartSLOduration=1.683075674 podStartE2EDuration="14.046561965s" podCreationTimestamp="2025-12-13 07:00:59 +0000 UTC" firstStartedPulling="2025-12-13 07:01:00.095049136 +0000 UTC m=+922.309999968" lastFinishedPulling="2025-12-13 07:01:12.458535425 +0000 UTC m=+934.673486259" observedRunningTime="2025-12-13 07:01:13.043722637 +0000 UTC m=+935.258673470" watchObservedRunningTime="2025-12-13 07:01:13.046561965 +0000 UTC m=+935.261512798" Dec 13 07:01:13 crc kubenswrapper[4644]: I1213 07:01:13.083279 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jd5r8"] Dec 13 07:01:13 crc kubenswrapper[4644]: I1213 07:01:13.089268 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jd5r8"] Dec 13 07:01:14 crc kubenswrapper[4644]: I1213 07:01:14.008155 4644 generic.go:334] "Generic (PLEG): container finished" podID="54a575e1-c07b-4928-ab41-90deba7ddaad" containerID="850a847c976fc9bdf5b83ca9b3d7d9183b75eba17920b6b7642af80e5d0fe15c" exitCode=0 Dec 13 07:01:14 crc kubenswrapper[4644]: I1213 07:01:14.008201 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-txrxh-config-2n8t7" 
event={"ID":"54a575e1-c07b-4928-ab41-90deba7ddaad","Type":"ContainerDied","Data":"850a847c976fc9bdf5b83ca9b3d7d9183b75eba17920b6b7642af80e5d0fe15c"} Dec 13 07:01:14 crc kubenswrapper[4644]: I1213 07:01:14.184475 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-txrxh" Dec 13 07:01:14 crc kubenswrapper[4644]: I1213 07:01:14.397324 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69240bb4-8ef9-4859-a1f8-c112b1c39e3d" path="/var/lib/kubelet/pods/69240bb4-8ef9-4859-a1f8-c112b1c39e3d/volumes" Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.290538 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.406069 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54a575e1-c07b-4928-ab41-90deba7ddaad-scripts\") pod \"54a575e1-c07b-4928-ab41-90deba7ddaad\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.406156 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/54a575e1-c07b-4928-ab41-90deba7ddaad-additional-scripts\") pod \"54a575e1-c07b-4928-ab41-90deba7ddaad\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.406220 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-log-ovn\") pod \"54a575e1-c07b-4928-ab41-90deba7ddaad\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.406337 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-run\") pod \"54a575e1-c07b-4928-ab41-90deba7ddaad\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.406391 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lr7fw\" (UniqueName: \"kubernetes.io/projected/54a575e1-c07b-4928-ab41-90deba7ddaad-kube-api-access-lr7fw\") pod \"54a575e1-c07b-4928-ab41-90deba7ddaad\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.406419 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-run-ovn\") pod \"54a575e1-c07b-4928-ab41-90deba7ddaad\" (UID: \"54a575e1-c07b-4928-ab41-90deba7ddaad\") " Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.406609 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "54a575e1-c07b-4928-ab41-90deba7ddaad" (UID: "54a575e1-c07b-4928-ab41-90deba7ddaad"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.406818 4644 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.406823 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-run" (OuterVolumeSpecName: "var-run") pod "54a575e1-c07b-4928-ab41-90deba7ddaad" (UID: "54a575e1-c07b-4928-ab41-90deba7ddaad"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.407378 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54a575e1-c07b-4928-ab41-90deba7ddaad-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "54a575e1-c07b-4928-ab41-90deba7ddaad" (UID: "54a575e1-c07b-4928-ab41-90deba7ddaad"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.407420 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "54a575e1-c07b-4928-ab41-90deba7ddaad" (UID: "54a575e1-c07b-4928-ab41-90deba7ddaad"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.407678 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54a575e1-c07b-4928-ab41-90deba7ddaad-scripts" (OuterVolumeSpecName: "scripts") pod "54a575e1-c07b-4928-ab41-90deba7ddaad" (UID: "54a575e1-c07b-4928-ab41-90deba7ddaad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.422823 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54a575e1-c07b-4928-ab41-90deba7ddaad-kube-api-access-lr7fw" (OuterVolumeSpecName: "kube-api-access-lr7fw") pod "54a575e1-c07b-4928-ab41-90deba7ddaad" (UID: "54a575e1-c07b-4928-ab41-90deba7ddaad"). InnerVolumeSpecName "kube-api-access-lr7fw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.508657 4644 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-run\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.508691 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lr7fw\" (UniqueName: \"kubernetes.io/projected/54a575e1-c07b-4928-ab41-90deba7ddaad-kube-api-access-lr7fw\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.508716 4644 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/54a575e1-c07b-4928-ab41-90deba7ddaad-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.508725 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54a575e1-c07b-4928-ab41-90deba7ddaad-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:15 crc kubenswrapper[4644]: I1213 07:01:15.508734 4644 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/54a575e1-c07b-4928-ab41-90deba7ddaad-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:16 crc kubenswrapper[4644]: I1213 07:01:16.021262 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-txrxh-config-2n8t7" event={"ID":"54a575e1-c07b-4928-ab41-90deba7ddaad","Type":"ContainerDied","Data":"b903409bc7a506c57d8fc1a615ac1d84ef6f572bc18574226c2f6cbd32970290"} Dec 13 07:01:16 crc kubenswrapper[4644]: I1213 07:01:16.021492 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b903409bc7a506c57d8fc1a615ac1d84ef6f572bc18574226c2f6cbd32970290" Dec 13 07:01:16 crc kubenswrapper[4644]: I1213 07:01:16.021309 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-txrxh-config-2n8t7" Dec 13 07:01:16 crc kubenswrapper[4644]: I1213 07:01:16.397225 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-txrxh-config-2n8t7"] Dec 13 07:01:16 crc kubenswrapper[4644]: I1213 07:01:16.397263 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-txrxh-config-2n8t7"] Dec 13 07:01:17 crc kubenswrapper[4644]: I1213 07:01:17.029874 4644 generic.go:334] "Generic (PLEG): container finished" podID="b1204c9c-4441-44c3-8c02-cbf9c3c74a88" containerID="22f93573de6ca094d257b6b1bb3762bc574821fb9d5c740d032ced1b46b72942" exitCode=0 Dec 13 07:01:17 crc kubenswrapper[4644]: I1213 07:01:17.029958 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-lc7dc" event={"ID":"b1204c9c-4441-44c3-8c02-cbf9c3c74a88","Type":"ContainerDied","Data":"22f93573de6ca094d257b6b1bb3762bc574821fb9d5c740d032ced1b46b72942"} Dec 13 07:01:18 crc kubenswrapper[4644]: E1213 07:01:18.225285 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47f5ebb6_8937_441f_9c9f_a1e31c401eed.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47f5ebb6_8937_441f_9c9f_a1e31c401eed.slice/crio-b8fc48432f72b692739947981613bbb69944b32e6c437e9a01d15154f417b1b5\": RecentStats: unable to find data in memory cache]" Dec 13 07:01:18 crc kubenswrapper[4644]: I1213 07:01:18.378877 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-lc7dc" Dec 13 07:01:18 crc kubenswrapper[4644]: I1213 07:01:18.396573 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54a575e1-c07b-4928-ab41-90deba7ddaad" path="/var/lib/kubelet/pods/54a575e1-c07b-4928-ab41-90deba7ddaad/volumes" Dec 13 07:01:18 crc kubenswrapper[4644]: I1213 07:01:18.562739 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-combined-ca-bundle\") pod \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\" (UID: \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\") " Dec 13 07:01:18 crc kubenswrapper[4644]: I1213 07:01:18.562820 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wgk5\" (UniqueName: \"kubernetes.io/projected/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-kube-api-access-6wgk5\") pod \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\" (UID: \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\") " Dec 13 07:01:18 crc kubenswrapper[4644]: I1213 07:01:18.562888 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-config-data\") pod \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\" (UID: \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\") " Dec 13 07:01:18 crc kubenswrapper[4644]: I1213 07:01:18.562916 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-db-sync-config-data\") pod \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\" (UID: \"b1204c9c-4441-44c3-8c02-cbf9c3c74a88\") " Dec 13 07:01:18 crc kubenswrapper[4644]: I1213 07:01:18.566986 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b1204c9c-4441-44c3-8c02-cbf9c3c74a88" (UID: "b1204c9c-4441-44c3-8c02-cbf9c3c74a88"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:01:18 crc kubenswrapper[4644]: I1213 07:01:18.568822 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-kube-api-access-6wgk5" (OuterVolumeSpecName: "kube-api-access-6wgk5") pod "b1204c9c-4441-44c3-8c02-cbf9c3c74a88" (UID: "b1204c9c-4441-44c3-8c02-cbf9c3c74a88"). InnerVolumeSpecName "kube-api-access-6wgk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:01:18 crc kubenswrapper[4644]: I1213 07:01:18.580886 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1204c9c-4441-44c3-8c02-cbf9c3c74a88" (UID: "b1204c9c-4441-44c3-8c02-cbf9c3c74a88"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:01:18 crc kubenswrapper[4644]: I1213 07:01:18.593018 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-config-data" (OuterVolumeSpecName: "config-data") pod "b1204c9c-4441-44c3-8c02-cbf9c3c74a88" (UID: "b1204c9c-4441-44c3-8c02-cbf9c3c74a88"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:01:18 crc kubenswrapper[4644]: I1213 07:01:18.664730 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wgk5\" (UniqueName: \"kubernetes.io/projected/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-kube-api-access-6wgk5\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:18 crc kubenswrapper[4644]: I1213 07:01:18.664761 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:18 crc kubenswrapper[4644]: I1213 07:01:18.664775 4644 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:18 crc kubenswrapper[4644]: I1213 07:01:18.664789 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1204c9c-4441-44c3-8c02-cbf9c3c74a88-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.043630 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-lc7dc" event={"ID":"b1204c9c-4441-44c3-8c02-cbf9c3c74a88","Type":"ContainerDied","Data":"c773e41efedf1367a309eac0dac0c269634945560d41308919ab3397a617516d"} Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.043666 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c773e41efedf1367a309eac0dac0c269634945560d41308919ab3397a617516d" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.043677 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-lc7dc" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.393188 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bc56d6f79-jjz79"] Dec 13 07:01:19 crc kubenswrapper[4644]: E1213 07:01:19.393497 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54a575e1-c07b-4928-ab41-90deba7ddaad" containerName="ovn-config" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.393512 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="54a575e1-c07b-4928-ab41-90deba7ddaad" containerName="ovn-config" Dec 13 07:01:19 crc kubenswrapper[4644]: E1213 07:01:19.393534 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69240bb4-8ef9-4859-a1f8-c112b1c39e3d" containerName="extract-utilities" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.393540 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="69240bb4-8ef9-4859-a1f8-c112b1c39e3d" containerName="extract-utilities" Dec 13 07:01:19 crc kubenswrapper[4644]: E1213 07:01:19.393554 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1204c9c-4441-44c3-8c02-cbf9c3c74a88" containerName="glance-db-sync" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.393559 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1204c9c-4441-44c3-8c02-cbf9c3c74a88" containerName="glance-db-sync" Dec 13 07:01:19 crc kubenswrapper[4644]: E1213 07:01:19.393569 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69240bb4-8ef9-4859-a1f8-c112b1c39e3d" containerName="registry-server" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.393575 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="69240bb4-8ef9-4859-a1f8-c112b1c39e3d" containerName="registry-server" Dec 13 07:01:19 crc kubenswrapper[4644]: E1213 07:01:19.393587 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69240bb4-8ef9-4859-a1f8-c112b1c39e3d" containerName="extract-content" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.393593 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="69240bb4-8ef9-4859-a1f8-c112b1c39e3d" containerName="extract-content" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.393796 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="54a575e1-c07b-4928-ab41-90deba7ddaad" containerName="ovn-config" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.393812 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1204c9c-4441-44c3-8c02-cbf9c3c74a88" containerName="glance-db-sync" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.393827 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="69240bb4-8ef9-4859-a1f8-c112b1c39e3d" containerName="registry-server" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.394567 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.413387 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bc56d6f79-jjz79"] Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.578211 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jnvp\" (UniqueName: \"kubernetes.io/projected/bc9ebb37-f21c-4029-a614-ef0203c99d34-kube-api-access-6jnvp\") pod \"dnsmasq-dns-5bc56d6f79-jjz79\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.578270 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-config\") pod \"dnsmasq-dns-5bc56d6f79-jjz79\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.578307 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-ovsdbserver-sb\") pod \"dnsmasq-dns-5bc56d6f79-jjz79\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.578530 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-ovsdbserver-nb\") pod \"dnsmasq-dns-5bc56d6f79-jjz79\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.578740 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-dns-svc\") pod \"dnsmasq-dns-5bc56d6f79-jjz79\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.679859 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-config\") pod \"dnsmasq-dns-5bc56d6f79-jjz79\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.679905 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-ovsdbserver-sb\") pod \"dnsmasq-dns-5bc56d6f79-jjz79\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.679963 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-ovsdbserver-nb\") pod \"dnsmasq-dns-5bc56d6f79-jjz79\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.680000 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-dns-svc\") pod \"dnsmasq-dns-5bc56d6f79-jjz79\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.680047 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jnvp\" (UniqueName: \"kubernetes.io/projected/bc9ebb37-f21c-4029-a614-ef0203c99d34-kube-api-access-6jnvp\") pod \"dnsmasq-dns-5bc56d6f79-jjz79\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.680837 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-config\") pod \"dnsmasq-dns-5bc56d6f79-jjz79\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.680908 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-ovsdbserver-sb\") pod \"dnsmasq-dns-5bc56d6f79-jjz79\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.680927 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-dns-svc\") pod \"dnsmasq-dns-5bc56d6f79-jjz79\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.681039 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-ovsdbserver-nb\") pod \"dnsmasq-dns-5bc56d6f79-jjz79\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.695011 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jnvp\" (UniqueName: \"kubernetes.io/projected/bc9ebb37-f21c-4029-a614-ef0203c99d34-kube-api-access-6jnvp\") pod \"dnsmasq-dns-5bc56d6f79-jjz79\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:19 crc kubenswrapper[4644]: I1213 07:01:19.710543 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:20 crc kubenswrapper[4644]: I1213 07:01:20.202087 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bc56d6f79-jjz79"] Dec 13 07:01:21 crc kubenswrapper[4644]: I1213 07:01:21.056378 4644 generic.go:334] "Generic (PLEG): container finished" podID="bc9ebb37-f21c-4029-a614-ef0203c99d34" containerID="f6b1abd71872c5f3d463762c134739ad22c075e1e33797e37bb1d3806a0c9232" exitCode=0 Dec 13 07:01:21 crc kubenswrapper[4644]: I1213 07:01:21.056426 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" event={"ID":"bc9ebb37-f21c-4029-a614-ef0203c99d34","Type":"ContainerDied","Data":"f6b1abd71872c5f3d463762c134739ad22c075e1e33797e37bb1d3806a0c9232"} Dec 13 07:01:21 crc kubenswrapper[4644]: I1213 07:01:21.056611 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" event={"ID":"bc9ebb37-f21c-4029-a614-ef0203c99d34","Type":"ContainerStarted","Data":"acc0b16d7dca3dd767d820e3ea687712007990221a099fd0ffdaaf0134fb81a4"} Dec 13 07:01:22 crc kubenswrapper[4644]: I1213 07:01:22.063939 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" event={"ID":"bc9ebb37-f21c-4029-a614-ef0203c99d34","Type":"ContainerStarted","Data":"33424c46aa834c4c129fec50f45a9b4229c4fa5175b84b4a36f663a884dcb7a5"} Dec 13 07:01:22 crc kubenswrapper[4644]: I1213 07:01:22.064361 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:22 crc kubenswrapper[4644]: I1213 07:01:22.079586 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" podStartSLOduration=3.079569785 podStartE2EDuration="3.079569785s" podCreationTimestamp="2025-12-13 07:01:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:01:22.076373626 +0000 UTC m=+944.291324459" watchObservedRunningTime="2025-12-13 07:01:22.079569785 +0000 UTC m=+944.294520618" Dec 13 07:01:25 crc kubenswrapper[4644]: I1213 07:01:25.620111 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 13 07:01:25 crc kubenswrapper[4644]: I1213 07:01:25.883004 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-twbxq"] Dec 13 07:01:25 crc kubenswrapper[4644]: I1213 07:01:25.884137 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-twbxq" Dec 13 07:01:25 crc kubenswrapper[4644]: I1213 07:01:25.893765 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-twbxq"] Dec 13 07:01:25 crc kubenswrapper[4644]: I1213 07:01:25.916599 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:01:25 crc kubenswrapper[4644]: I1213 07:01:25.974332 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-w4gfg"] Dec 13 07:01:25 crc kubenswrapper[4644]: I1213 07:01:25.975512 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-w4gfg" Dec 13 07:01:25 crc kubenswrapper[4644]: I1213 07:01:25.985341 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5g22\" (UniqueName: \"kubernetes.io/projected/1db0e56b-985b-427f-9afa-d525e9de505d-kube-api-access-g5g22\") pod \"barbican-db-create-w4gfg\" (UID: \"1db0e56b-985b-427f-9afa-d525e9de505d\") " pod="openstack/barbican-db-create-w4gfg" Dec 13 07:01:25 crc kubenswrapper[4644]: I1213 07:01:25.985500 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1db0e56b-985b-427f-9afa-d525e9de505d-operator-scripts\") pod \"barbican-db-create-w4gfg\" (UID: \"1db0e56b-985b-427f-9afa-d525e9de505d\") " pod="openstack/barbican-db-create-w4gfg" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.003867 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-w4gfg"] Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.009540 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-2dff-account-create-update-rlqft"] Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.010559 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2dff-account-create-update-rlqft" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.012784 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.043911 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-2dff-account-create-update-rlqft"] Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.083942 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-cec9-account-create-update-nt5x6"] Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.085155 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-cec9-account-create-update-nt5x6" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.086793 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1db0e56b-985b-427f-9afa-d525e9de505d-operator-scripts\") pod \"barbican-db-create-w4gfg\" (UID: \"1db0e56b-985b-427f-9afa-d525e9de505d\") " pod="openstack/barbican-db-create-w4gfg" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.087000 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xztv4\" (UniqueName: \"kubernetes.io/projected/423735a8-e97e-4ce8-aecf-287e8fe08713-kube-api-access-xztv4\") pod \"cinder-db-create-twbxq\" (UID: \"423735a8-e97e-4ce8-aecf-287e8fe08713\") " pod="openstack/cinder-db-create-twbxq" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.087270 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.087410 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5g22\" (UniqueName: \"kubernetes.io/projected/1db0e56b-985b-427f-9afa-d525e9de505d-kube-api-access-g5g22\") pod \"barbican-db-create-w4gfg\" (UID: \"1db0e56b-985b-427f-9afa-d525e9de505d\") " pod="openstack/barbican-db-create-w4gfg" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.087962 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1db0e56b-985b-427f-9afa-d525e9de505d-operator-scripts\") pod \"barbican-db-create-w4gfg\" (UID: \"1db0e56b-985b-427f-9afa-d525e9de505d\") " pod="openstack/barbican-db-create-w4gfg" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.087964 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/423735a8-e97e-4ce8-aecf-287e8fe08713-operator-scripts\") pod \"cinder-db-create-twbxq\" (UID: \"423735a8-e97e-4ce8-aecf-287e8fe08713\") " pod="openstack/cinder-db-create-twbxq" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.091793 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-cec9-account-create-update-nt5x6"] Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.132119 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5g22\" (UniqueName: \"kubernetes.io/projected/1db0e56b-985b-427f-9afa-d525e9de505d-kube-api-access-g5g22\") pod \"barbican-db-create-w4gfg\" (UID: \"1db0e56b-985b-427f-9afa-d525e9de505d\") " pod="openstack/barbican-db-create-w4gfg" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.189411 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/423735a8-e97e-4ce8-aecf-287e8fe08713-operator-scripts\") pod \"cinder-db-create-twbxq\" (UID: \"423735a8-e97e-4ce8-aecf-287e8fe08713\") " pod="openstack/cinder-db-create-twbxq" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.189479 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp28d\" (UniqueName: \"kubernetes.io/projected/c3cda71d-8d2a-4987-b267-e5cfcd8dd753-kube-api-access-rp28d\") pod \"cinder-cec9-account-create-update-nt5x6\" (UID: \"c3cda71d-8d2a-4987-b267-e5cfcd8dd753\") " 
pod="openstack/cinder-cec9-account-create-update-nt5x6" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.189546 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3cda71d-8d2a-4987-b267-e5cfcd8dd753-operator-scripts\") pod \"cinder-cec9-account-create-update-nt5x6\" (UID: \"c3cda71d-8d2a-4987-b267-e5cfcd8dd753\") " pod="openstack/cinder-cec9-account-create-update-nt5x6" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.189583 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bq6l7\" (UniqueName: \"kubernetes.io/projected/486a00a0-302e-4867-b7e4-9d935d9278ac-kube-api-access-bq6l7\") pod \"barbican-2dff-account-create-update-rlqft\" (UID: \"486a00a0-302e-4867-b7e4-9d935d9278ac\") " pod="openstack/barbican-2dff-account-create-update-rlqft" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.189629 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xztv4\" (UniqueName: \"kubernetes.io/projected/423735a8-e97e-4ce8-aecf-287e8fe08713-kube-api-access-xztv4\") pod \"cinder-db-create-twbxq\" (UID: \"423735a8-e97e-4ce8-aecf-287e8fe08713\") " pod="openstack/cinder-db-create-twbxq" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.189656 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/486a00a0-302e-4867-b7e4-9d935d9278ac-operator-scripts\") pod \"barbican-2dff-account-create-update-rlqft\" (UID: \"486a00a0-302e-4867-b7e4-9d935d9278ac\") " pod="openstack/barbican-2dff-account-create-update-rlqft" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.190326 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/423735a8-e97e-4ce8-aecf-287e8fe08713-operator-scripts\") pod \"cinder-db-create-twbxq\" (UID: \"423735a8-e97e-4ce8-aecf-287e8fe08713\") " pod="openstack/cinder-db-create-twbxq" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.205126 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xztv4\" (UniqueName: \"kubernetes.io/projected/423735a8-e97e-4ce8-aecf-287e8fe08713-kube-api-access-xztv4\") pod \"cinder-db-create-twbxq\" (UID: \"423735a8-e97e-4ce8-aecf-287e8fe08713\") " pod="openstack/cinder-db-create-twbxq" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.289244 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-w4gfg" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.290593 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bq6l7\" (UniqueName: \"kubernetes.io/projected/486a00a0-302e-4867-b7e4-9d935d9278ac-kube-api-access-bq6l7\") pod \"barbican-2dff-account-create-update-rlqft\" (UID: \"486a00a0-302e-4867-b7e4-9d935d9278ac\") " pod="openstack/barbican-2dff-account-create-update-rlqft" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.290665 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/486a00a0-302e-4867-b7e4-9d935d9278ac-operator-scripts\") pod \"barbican-2dff-account-create-update-rlqft\" (UID: \"486a00a0-302e-4867-b7e4-9d935d9278ac\") " pod="openstack/barbican-2dff-account-create-update-rlqft" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.290801 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp28d\" (UniqueName: \"kubernetes.io/projected/c3cda71d-8d2a-4987-b267-e5cfcd8dd753-kube-api-access-rp28d\") pod \"cinder-cec9-account-create-update-nt5x6\" (UID: \"c3cda71d-8d2a-4987-b267-e5cfcd8dd753\") " pod="openstack/cinder-cec9-account-create-update-nt5x6" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.290851 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3cda71d-8d2a-4987-b267-e5cfcd8dd753-operator-scripts\") pod \"cinder-cec9-account-create-update-nt5x6\" (UID: \"c3cda71d-8d2a-4987-b267-e5cfcd8dd753\") " pod="openstack/cinder-cec9-account-create-update-nt5x6" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.291593 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/486a00a0-302e-4867-b7e4-9d935d9278ac-operator-scripts\") pod \"barbican-2dff-account-create-update-rlqft\" (UID: \"486a00a0-302e-4867-b7e4-9d935d9278ac\") " pod="openstack/barbican-2dff-account-create-update-rlqft" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.291629 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3cda71d-8d2a-4987-b267-e5cfcd8dd753-operator-scripts\") pod \"cinder-cec9-account-create-update-nt5x6\" (UID: \"c3cda71d-8d2a-4987-b267-e5cfcd8dd753\") " pod="openstack/cinder-cec9-account-create-update-nt5x6" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.306246 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-brm8s"] Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.307146 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-brm8s" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.309247 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.315555 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp28d\" (UniqueName: \"kubernetes.io/projected/c3cda71d-8d2a-4987-b267-e5cfcd8dd753-kube-api-access-rp28d\") pod \"cinder-cec9-account-create-update-nt5x6\" (UID: \"c3cda71d-8d2a-4987-b267-e5cfcd8dd753\") " pod="openstack/cinder-cec9-account-create-update-nt5x6" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.320546 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-brm8s"] Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.336609 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-5hlkk" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.336703 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.336789 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.348527 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bq6l7\" (UniqueName: \"kubernetes.io/projected/486a00a0-302e-4867-b7e4-9d935d9278ac-kube-api-access-bq6l7\") pod \"barbican-2dff-account-create-update-rlqft\" (UID: \"486a00a0-302e-4867-b7e4-9d935d9278ac\") " pod="openstack/barbican-2dff-account-create-update-rlqft" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.374729 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-rf9cs"] Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.393811 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rf9cs" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.416078 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-cec9-account-create-update-nt5x6" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.450378 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-rf9cs"] Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.458080 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-1e41-account-create-update-4wmw8"] Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.459327 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-1e41-account-create-update-4wmw8" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.461685 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.467735 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-1e41-account-create-update-4wmw8"] Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.496401 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vs7jq\" (UniqueName: \"kubernetes.io/projected/badba5c3-308b-457e-a988-f7f49a15e48a-kube-api-access-vs7jq\") pod \"neutron-db-create-rf9cs\" (UID: \"badba5c3-308b-457e-a988-f7f49a15e48a\") " pod="openstack/neutron-db-create-rf9cs" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.496495 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-combined-ca-bundle\") pod \"keystone-db-sync-brm8s\" (UID: \"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8\") " pod="openstack/keystone-db-sync-brm8s" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.496555 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/badba5c3-308b-457e-a988-f7f49a15e48a-operator-scripts\") pod \"neutron-db-create-rf9cs\" (UID: \"badba5c3-308b-457e-a988-f7f49a15e48a\") " pod="openstack/neutron-db-create-rf9cs" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.496695 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r57hl\" (UniqueName: \"kubernetes.io/projected/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-kube-api-access-r57hl\") pod \"keystone-db-sync-brm8s\" (UID: \"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8\") " pod="openstack/keystone-db-sync-brm8s" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.496733 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-config-data\") pod \"keystone-db-sync-brm8s\" (UID: \"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8\") " pod="openstack/keystone-db-sync-brm8s" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.504492 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-twbxq" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.599349 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r57hl\" (UniqueName: \"kubernetes.io/projected/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-kube-api-access-r57hl\") pod \"keystone-db-sync-brm8s\" (UID: \"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8\") " pod="openstack/keystone-db-sync-brm8s" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.599397 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-config-data\") pod \"keystone-db-sync-brm8s\" (UID: \"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8\") " pod="openstack/keystone-db-sync-brm8s" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.599522 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vs7jq\" (UniqueName: \"kubernetes.io/projected/badba5c3-308b-457e-a988-f7f49a15e48a-kube-api-access-vs7jq\") pod \"neutron-db-create-rf9cs\" (UID: \"badba5c3-308b-457e-a988-f7f49a15e48a\") " pod="openstack/neutron-db-create-rf9cs" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.599578 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-combined-ca-bundle\") pod \"keystone-db-sync-brm8s\" (UID: \"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8\") " pod="openstack/keystone-db-sync-brm8s" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.599634 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/badba5c3-308b-457e-a988-f7f49a15e48a-operator-scripts\") pod \"neutron-db-create-rf9cs\" (UID: \"badba5c3-308b-457e-a988-f7f49a15e48a\") " pod="openstack/neutron-db-create-rf9cs" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.599661 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ba20404-0531-48f1-894f-5903b3ff71ef-operator-scripts\") pod \"neutron-1e41-account-create-update-4wmw8\" (UID: \"3ba20404-0531-48f1-894f-5903b3ff71ef\") " pod="openstack/neutron-1e41-account-create-update-4wmw8" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.599741 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lz7j5\" (UniqueName: \"kubernetes.io/projected/3ba20404-0531-48f1-894f-5903b3ff71ef-kube-api-access-lz7j5\") pod \"neutron-1e41-account-create-update-4wmw8\" (UID: \"3ba20404-0531-48f1-894f-5903b3ff71ef\") " pod="openstack/neutron-1e41-account-create-update-4wmw8" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.601173 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/badba5c3-308b-457e-a988-f7f49a15e48a-operator-scripts\") pod \"neutron-db-create-rf9cs\" (UID: \"badba5c3-308b-457e-a988-f7f49a15e48a\") " pod="openstack/neutron-db-create-rf9cs" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.603752 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-config-data\") pod \"keystone-db-sync-brm8s\" (UID: 
\"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8\") " pod="openstack/keystone-db-sync-brm8s" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.604773 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-combined-ca-bundle\") pod \"keystone-db-sync-brm8s\" (UID: \"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8\") " pod="openstack/keystone-db-sync-brm8s" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.617750 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vs7jq\" (UniqueName: \"kubernetes.io/projected/badba5c3-308b-457e-a988-f7f49a15e48a-kube-api-access-vs7jq\") pod \"neutron-db-create-rf9cs\" (UID: \"badba5c3-308b-457e-a988-f7f49a15e48a\") " pod="openstack/neutron-db-create-rf9cs" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.617901 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r57hl\" (UniqueName: \"kubernetes.io/projected/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-kube-api-access-r57hl\") pod \"keystone-db-sync-brm8s\" (UID: \"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8\") " pod="openstack/keystone-db-sync-brm8s" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.629090 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2dff-account-create-update-rlqft" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.701945 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ba20404-0531-48f1-894f-5903b3ff71ef-operator-scripts\") pod \"neutron-1e41-account-create-update-4wmw8\" (UID: \"3ba20404-0531-48f1-894f-5903b3ff71ef\") " pod="openstack/neutron-1e41-account-create-update-4wmw8" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.702240 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lz7j5\" (UniqueName: \"kubernetes.io/projected/3ba20404-0531-48f1-894f-5903b3ff71ef-kube-api-access-lz7j5\") pod \"neutron-1e41-account-create-update-4wmw8\" (UID: \"3ba20404-0531-48f1-894f-5903b3ff71ef\") " pod="openstack/neutron-1e41-account-create-update-4wmw8" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.704235 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ba20404-0531-48f1-894f-5903b3ff71ef-operator-scripts\") pod \"neutron-1e41-account-create-update-4wmw8\" (UID: \"3ba20404-0531-48f1-894f-5903b3ff71ef\") " pod="openstack/neutron-1e41-account-create-update-4wmw8" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.724510 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-brm8s" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.727823 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lz7j5\" (UniqueName: \"kubernetes.io/projected/3ba20404-0531-48f1-894f-5903b3ff71ef-kube-api-access-lz7j5\") pod \"neutron-1e41-account-create-update-4wmw8\" (UID: \"3ba20404-0531-48f1-894f-5903b3ff71ef\") " pod="openstack/neutron-1e41-account-create-update-4wmw8" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.744229 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-rf9cs" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.787573 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-1e41-account-create-update-4wmw8" Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.825736 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-w4gfg"] Dec 13 07:01:26 crc kubenswrapper[4644]: W1213 07:01:26.843729 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1db0e56b_985b_427f_9afa_d525e9de505d.slice/crio-9bb73a2233d428c5259852c002628e55419d51c5980478cbcc0a3f93a0787c8c WatchSource:0}: Error finding container 9bb73a2233d428c5259852c002628e55419d51c5980478cbcc0a3f93a0787c8c: Status 404 returned error can't find the container with id 9bb73a2233d428c5259852c002628e55419d51c5980478cbcc0a3f93a0787c8c Dec 13 07:01:26 crc kubenswrapper[4644]: I1213 07:01:26.920986 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-cec9-account-create-update-nt5x6"] Dec 13 07:01:26 crc kubenswrapper[4644]: W1213 07:01:26.937615 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3cda71d_8d2a_4987_b267_e5cfcd8dd753.slice/crio-93809076c93a2903f5c2ddf3c21118626a20f1d7d0c4dcf76e2b8088d6306488 WatchSource:0}: Error finding container 93809076c93a2903f5c2ddf3c21118626a20f1d7d0c4dcf76e2b8088d6306488: Status 404 returned error can't find the container with id 93809076c93a2903f5c2ddf3c21118626a20f1d7d0c4dcf76e2b8088d6306488 Dec 13 07:01:27 crc kubenswrapper[4644]: I1213 07:01:27.005633 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-twbxq"] Dec 13 07:01:27 crc kubenswrapper[4644]: W1213 07:01:27.025810 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod423735a8_e97e_4ce8_aecf_287e8fe08713.slice/crio-502345d279f021c063a638976d142016a0ebfdb523c6423ac60379c8b2eb22a0 WatchSource:0}: Error finding container 502345d279f021c063a638976d142016a0ebfdb523c6423ac60379c8b2eb22a0: Status 404 returned error can't find the container with id 502345d279f021c063a638976d142016a0ebfdb523c6423ac60379c8b2eb22a0 Dec 13 07:01:27 crc kubenswrapper[4644]: I1213 07:01:27.106992 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-twbxq" event={"ID":"423735a8-e97e-4ce8-aecf-287e8fe08713","Type":"ContainerStarted","Data":"502345d279f021c063a638976d142016a0ebfdb523c6423ac60379c8b2eb22a0"} Dec 13 07:01:27 crc kubenswrapper[4644]: I1213 07:01:27.134926 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-cec9-account-create-update-nt5x6" event={"ID":"c3cda71d-8d2a-4987-b267-e5cfcd8dd753","Type":"ContainerStarted","Data":"93809076c93a2903f5c2ddf3c21118626a20f1d7d0c4dcf76e2b8088d6306488"} Dec 13 07:01:27 crc kubenswrapper[4644]: I1213 07:01:27.157644 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-w4gfg" event={"ID":"1db0e56b-985b-427f-9afa-d525e9de505d","Type":"ContainerStarted","Data":"3a45c840e9e2118188e237411a5b1b39a859ff5e7f166799a56aea923b964910"} Dec 13 07:01:27 crc kubenswrapper[4644]: I1213 07:01:27.157690 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-w4gfg" 
event={"ID":"1db0e56b-985b-427f-9afa-d525e9de505d","Type":"ContainerStarted","Data":"9bb73a2233d428c5259852c002628e55419d51c5980478cbcc0a3f93a0787c8c"} Dec 13 07:01:27 crc kubenswrapper[4644]: I1213 07:01:27.171872 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-2dff-account-create-update-rlqft"] Dec 13 07:01:27 crc kubenswrapper[4644]: I1213 07:01:27.279369 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-w4gfg" podStartSLOduration=2.279351552 podStartE2EDuration="2.279351552s" podCreationTimestamp="2025-12-13 07:01:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:01:27.195771932 +0000 UTC m=+949.410722764" watchObservedRunningTime="2025-12-13 07:01:27.279351552 +0000 UTC m=+949.494302385" Dec 13 07:01:27 crc kubenswrapper[4644]: I1213 07:01:27.279610 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-brm8s"] Dec 13 07:01:27 crc kubenswrapper[4644]: I1213 07:01:27.348150 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-rf9cs"] Dec 13 07:01:27 crc kubenswrapper[4644]: W1213 07:01:27.375938 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbadba5c3_308b_457e_a988_f7f49a15e48a.slice/crio-3f9b7bde8778a6b42238a5da5011889764e19577629fda7d6d5bdd3ed604400f WatchSource:0}: Error finding container 3f9b7bde8778a6b42238a5da5011889764e19577629fda7d6d5bdd3ed604400f: Status 404 returned error can't find the container with id 3f9b7bde8778a6b42238a5da5011889764e19577629fda7d6d5bdd3ed604400f Dec 13 07:01:27 crc kubenswrapper[4644]: I1213 07:01:27.401375 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-1e41-account-create-update-4wmw8"] Dec 13 07:01:28 crc kubenswrapper[4644]: I1213 07:01:28.175119 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-brm8s" event={"ID":"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8","Type":"ContainerStarted","Data":"8a86629d3abb812163ada6b86bce5acfd4f134030f6dfb5df1747a86cbd2f273"} Dec 13 07:01:28 crc kubenswrapper[4644]: I1213 07:01:28.178817 4644 generic.go:334] "Generic (PLEG): container finished" podID="badba5c3-308b-457e-a988-f7f49a15e48a" containerID="d60f19519e2d7f793086d5d5a5e433ea0346765c7e4bfd2b3c84b962126f6e80" exitCode=0 Dec 13 07:01:28 crc kubenswrapper[4644]: I1213 07:01:28.178904 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rf9cs" event={"ID":"badba5c3-308b-457e-a988-f7f49a15e48a","Type":"ContainerDied","Data":"d60f19519e2d7f793086d5d5a5e433ea0346765c7e4bfd2b3c84b962126f6e80"} Dec 13 07:01:28 crc kubenswrapper[4644]: I1213 07:01:28.178926 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rf9cs" event={"ID":"badba5c3-308b-457e-a988-f7f49a15e48a","Type":"ContainerStarted","Data":"3f9b7bde8778a6b42238a5da5011889764e19577629fda7d6d5bdd3ed604400f"} Dec 13 07:01:28 crc kubenswrapper[4644]: I1213 07:01:28.180681 4644 generic.go:334] "Generic (PLEG): container finished" podID="486a00a0-302e-4867-b7e4-9d935d9278ac" containerID="07ce829422994523568085b330c2f843d15342910ba58e5f830d189a884e623b" exitCode=0 Dec 13 07:01:28 crc kubenswrapper[4644]: I1213 07:01:28.180829 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2dff-account-create-update-rlqft" 
event={"ID":"486a00a0-302e-4867-b7e4-9d935d9278ac","Type":"ContainerDied","Data":"07ce829422994523568085b330c2f843d15342910ba58e5f830d189a884e623b"} Dec 13 07:01:28 crc kubenswrapper[4644]: I1213 07:01:28.180864 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2dff-account-create-update-rlqft" event={"ID":"486a00a0-302e-4867-b7e4-9d935d9278ac","Type":"ContainerStarted","Data":"ac33c23d4ff63a766f75f4c7226b971ad3b6457bfb58bc9dc038304581ccf5f5"} Dec 13 07:01:28 crc kubenswrapper[4644]: I1213 07:01:28.183595 4644 generic.go:334] "Generic (PLEG): container finished" podID="c3cda71d-8d2a-4987-b267-e5cfcd8dd753" containerID="185ea717dfc5086ba409c443e800ee4e9b3f3dce4f17bc951d6d54a42b9feacc" exitCode=0 Dec 13 07:01:28 crc kubenswrapper[4644]: I1213 07:01:28.183642 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-cec9-account-create-update-nt5x6" event={"ID":"c3cda71d-8d2a-4987-b267-e5cfcd8dd753","Type":"ContainerDied","Data":"185ea717dfc5086ba409c443e800ee4e9b3f3dce4f17bc951d6d54a42b9feacc"} Dec 13 07:01:28 crc kubenswrapper[4644]: I1213 07:01:28.185078 4644 generic.go:334] "Generic (PLEG): container finished" podID="3ba20404-0531-48f1-894f-5903b3ff71ef" containerID="c071450de3096cac70191b42fbf695c4db29ca7f41b8599b26297ddf026ba287" exitCode=0 Dec 13 07:01:28 crc kubenswrapper[4644]: I1213 07:01:28.185121 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1e41-account-create-update-4wmw8" event={"ID":"3ba20404-0531-48f1-894f-5903b3ff71ef","Type":"ContainerDied","Data":"c071450de3096cac70191b42fbf695c4db29ca7f41b8599b26297ddf026ba287"} Dec 13 07:01:28 crc kubenswrapper[4644]: I1213 07:01:28.185138 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1e41-account-create-update-4wmw8" event={"ID":"3ba20404-0531-48f1-894f-5903b3ff71ef","Type":"ContainerStarted","Data":"210ba74e0ee602066321fe52dc38920e45cd6b34103e12a85475d1243150fda7"} Dec 13 07:01:28 crc kubenswrapper[4644]: I1213 07:01:28.186629 4644 generic.go:334] "Generic (PLEG): container finished" podID="1db0e56b-985b-427f-9afa-d525e9de505d" containerID="3a45c840e9e2118188e237411a5b1b39a859ff5e7f166799a56aea923b964910" exitCode=0 Dec 13 07:01:28 crc kubenswrapper[4644]: I1213 07:01:28.186671 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-w4gfg" event={"ID":"1db0e56b-985b-427f-9afa-d525e9de505d","Type":"ContainerDied","Data":"3a45c840e9e2118188e237411a5b1b39a859ff5e7f166799a56aea923b964910"} Dec 13 07:01:28 crc kubenswrapper[4644]: I1213 07:01:28.187873 4644 generic.go:334] "Generic (PLEG): container finished" podID="423735a8-e97e-4ce8-aecf-287e8fe08713" containerID="fe63ee29370897c239004e85ce61a0e905f75c1a55221eaa658c0831ff790b73" exitCode=0 Dec 13 07:01:28 crc kubenswrapper[4644]: I1213 07:01:28.187900 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-twbxq" event={"ID":"423735a8-e97e-4ce8-aecf-287e8fe08713","Type":"ContainerDied","Data":"fe63ee29370897c239004e85ce61a0e905f75c1a55221eaa658c0831ff790b73"} Dec 13 07:01:28 crc kubenswrapper[4644]: E1213 07:01:28.438153 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47f5ebb6_8937_441f_9c9f_a1e31c401eed.slice\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47f5ebb6_8937_441f_9c9f_a1e31c401eed.slice/crio-b8fc48432f72b692739947981613bbb69944b32e6c437e9a01d15154f417b1b5\": RecentStats: unable to find data in memory cache]" Dec 13 07:01:29 crc kubenswrapper[4644]: I1213 07:01:29.711586 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:01:29 crc kubenswrapper[4644]: I1213 07:01:29.757199 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-jtk58"] Dec 13 07:01:29 crc kubenswrapper[4644]: I1213 07:01:29.758679 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-586b989cdc-jtk58" podUID="b52e9cb2-f628-4a79-a112-4c567318e8d3" containerName="dnsmasq-dns" containerID="cri-o://d850d996273d40330b3a0f39a43b40aa30be120a0e88e23e5fa7a7ac24aac8ef" gracePeriod=10 Dec 13 07:01:30 crc kubenswrapper[4644]: I1213 07:01:30.204860 4644 generic.go:334] "Generic (PLEG): container finished" podID="b52e9cb2-f628-4a79-a112-4c567318e8d3" containerID="d850d996273d40330b3a0f39a43b40aa30be120a0e88e23e5fa7a7ac24aac8ef" exitCode=0 Dec 13 07:01:30 crc kubenswrapper[4644]: I1213 07:01:30.204963 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-jtk58" event={"ID":"b52e9cb2-f628-4a79-a112-4c567318e8d3","Type":"ContainerDied","Data":"d850d996273d40330b3a0f39a43b40aa30be120a0e88e23e5fa7a7ac24aac8ef"} Dec 13 07:01:31 crc kubenswrapper[4644]: I1213 07:01:31.635548 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-586b989cdc-jtk58" podUID="b52e9cb2-f628-4a79-a112-4c567318e8d3" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.107:5353: connect: connection refused" Dec 13 07:01:31 crc kubenswrapper[4644]: I1213 07:01:31.907109 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2dff-account-create-update-rlqft" Dec 13 07:01:31 crc kubenswrapper[4644]: I1213 07:01:31.921733 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rf9cs" Dec 13 07:01:31 crc kubenswrapper[4644]: I1213 07:01:31.945179 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-w4gfg" Dec 13 07:01:31 crc kubenswrapper[4644]: I1213 07:01:31.952094 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-1e41-account-create-update-4wmw8" Dec 13 07:01:31 crc kubenswrapper[4644]: I1213 07:01:31.973804 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-cec9-account-create-update-nt5x6" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.000886 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-twbxq" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.007101 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/486a00a0-302e-4867-b7e4-9d935d9278ac-operator-scripts\") pod \"486a00a0-302e-4867-b7e4-9d935d9278ac\" (UID: \"486a00a0-302e-4867-b7e4-9d935d9278ac\") " Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.007162 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/badba5c3-308b-457e-a988-f7f49a15e48a-operator-scripts\") pod \"badba5c3-308b-457e-a988-f7f49a15e48a\" (UID: \"badba5c3-308b-457e-a988-f7f49a15e48a\") " Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.007252 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bq6l7\" (UniqueName: \"kubernetes.io/projected/486a00a0-302e-4867-b7e4-9d935d9278ac-kube-api-access-bq6l7\") pod \"486a00a0-302e-4867-b7e4-9d935d9278ac\" (UID: \"486a00a0-302e-4867-b7e4-9d935d9278ac\") " Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.007293 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vs7jq\" (UniqueName: \"kubernetes.io/projected/badba5c3-308b-457e-a988-f7f49a15e48a-kube-api-access-vs7jq\") pod \"badba5c3-308b-457e-a988-f7f49a15e48a\" (UID: \"badba5c3-308b-457e-a988-f7f49a15e48a\") " Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.011353 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/badba5c3-308b-457e-a988-f7f49a15e48a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "badba5c3-308b-457e-a988-f7f49a15e48a" (UID: "badba5c3-308b-457e-a988-f7f49a15e48a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.012075 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/486a00a0-302e-4867-b7e4-9d935d9278ac-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "486a00a0-302e-4867-b7e4-9d935d9278ac" (UID: "486a00a0-302e-4867-b7e4-9d935d9278ac"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.013076 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/badba5c3-308b-457e-a988-f7f49a15e48a-kube-api-access-vs7jq" (OuterVolumeSpecName: "kube-api-access-vs7jq") pod "badba5c3-308b-457e-a988-f7f49a15e48a" (UID: "badba5c3-308b-457e-a988-f7f49a15e48a"). InnerVolumeSpecName "kube-api-access-vs7jq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.026017 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/486a00a0-302e-4867-b7e4-9d935d9278ac-kube-api-access-bq6l7" (OuterVolumeSpecName: "kube-api-access-bq6l7") pod "486a00a0-302e-4867-b7e4-9d935d9278ac" (UID: "486a00a0-302e-4867-b7e4-9d935d9278ac"). InnerVolumeSpecName "kube-api-access-bq6l7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.053592 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.108850 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5g22\" (UniqueName: \"kubernetes.io/projected/1db0e56b-985b-427f-9afa-d525e9de505d-kube-api-access-g5g22\") pod \"1db0e56b-985b-427f-9afa-d525e9de505d\" (UID: \"1db0e56b-985b-427f-9afa-d525e9de505d\") " Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.108969 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz7j5\" (UniqueName: \"kubernetes.io/projected/3ba20404-0531-48f1-894f-5903b3ff71ef-kube-api-access-lz7j5\") pod \"3ba20404-0531-48f1-894f-5903b3ff71ef\" (UID: \"3ba20404-0531-48f1-894f-5903b3ff71ef\") " Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.109003 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1db0e56b-985b-427f-9afa-d525e9de505d-operator-scripts\") pod \"1db0e56b-985b-427f-9afa-d525e9de505d\" (UID: \"1db0e56b-985b-427f-9afa-d525e9de505d\") " Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.109025 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3cda71d-8d2a-4987-b267-e5cfcd8dd753-operator-scripts\") pod \"c3cda71d-8d2a-4987-b267-e5cfcd8dd753\" (UID: \"c3cda71d-8d2a-4987-b267-e5cfcd8dd753\") " Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.109147 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rp28d\" (UniqueName: \"kubernetes.io/projected/c3cda71d-8d2a-4987-b267-e5cfcd8dd753-kube-api-access-rp28d\") pod \"c3cda71d-8d2a-4987-b267-e5cfcd8dd753\" (UID: \"c3cda71d-8d2a-4987-b267-e5cfcd8dd753\") " Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.109171 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/423735a8-e97e-4ce8-aecf-287e8fe08713-operator-scripts\") pod \"423735a8-e97e-4ce8-aecf-287e8fe08713\" (UID: \"423735a8-e97e-4ce8-aecf-287e8fe08713\") " Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.109244 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xztv4\" (UniqueName: \"kubernetes.io/projected/423735a8-e97e-4ce8-aecf-287e8fe08713-kube-api-access-xztv4\") pod \"423735a8-e97e-4ce8-aecf-287e8fe08713\" (UID: \"423735a8-e97e-4ce8-aecf-287e8fe08713\") " Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.109315 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ba20404-0531-48f1-894f-5903b3ff71ef-operator-scripts\") pod \"3ba20404-0531-48f1-894f-5903b3ff71ef\" (UID: \"3ba20404-0531-48f1-894f-5903b3ff71ef\") " Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.109648 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1db0e56b-985b-427f-9afa-d525e9de505d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1db0e56b-985b-427f-9afa-d525e9de505d" (UID: "1db0e56b-985b-427f-9afa-d525e9de505d"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.110083 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1db0e56b-985b-427f-9afa-d525e9de505d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.110102 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bq6l7\" (UniqueName: \"kubernetes.io/projected/486a00a0-302e-4867-b7e4-9d935d9278ac-kube-api-access-bq6l7\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.110115 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vs7jq\" (UniqueName: \"kubernetes.io/projected/badba5c3-308b-457e-a988-f7f49a15e48a-kube-api-access-vs7jq\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.110126 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/486a00a0-302e-4867-b7e4-9d935d9278ac-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.110135 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/badba5c3-308b-457e-a988-f7f49a15e48a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.110501 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ba20404-0531-48f1-894f-5903b3ff71ef-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3ba20404-0531-48f1-894f-5903b3ff71ef" (UID: "3ba20404-0531-48f1-894f-5903b3ff71ef"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.110511 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/423735a8-e97e-4ce8-aecf-287e8fe08713-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "423735a8-e97e-4ce8-aecf-287e8fe08713" (UID: "423735a8-e97e-4ce8-aecf-287e8fe08713"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.110568 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3cda71d-8d2a-4987-b267-e5cfcd8dd753-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c3cda71d-8d2a-4987-b267-e5cfcd8dd753" (UID: "c3cda71d-8d2a-4987-b267-e5cfcd8dd753"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.112744 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3cda71d-8d2a-4987-b267-e5cfcd8dd753-kube-api-access-rp28d" (OuterVolumeSpecName: "kube-api-access-rp28d") pod "c3cda71d-8d2a-4987-b267-e5cfcd8dd753" (UID: "c3cda71d-8d2a-4987-b267-e5cfcd8dd753"). InnerVolumeSpecName "kube-api-access-rp28d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.113027 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/423735a8-e97e-4ce8-aecf-287e8fe08713-kube-api-access-xztv4" (OuterVolumeSpecName: "kube-api-access-xztv4") pod "423735a8-e97e-4ce8-aecf-287e8fe08713" (UID: "423735a8-e97e-4ce8-aecf-287e8fe08713"). InnerVolumeSpecName "kube-api-access-xztv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.113726 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1db0e56b-985b-427f-9afa-d525e9de505d-kube-api-access-g5g22" (OuterVolumeSpecName: "kube-api-access-g5g22") pod "1db0e56b-985b-427f-9afa-d525e9de505d" (UID: "1db0e56b-985b-427f-9afa-d525e9de505d"). InnerVolumeSpecName "kube-api-access-g5g22". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.113781 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ba20404-0531-48f1-894f-5903b3ff71ef-kube-api-access-lz7j5" (OuterVolumeSpecName: "kube-api-access-lz7j5") pod "3ba20404-0531-48f1-894f-5903b3ff71ef" (UID: "3ba20404-0531-48f1-894f-5903b3ff71ef"). InnerVolumeSpecName "kube-api-access-lz7j5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.211096 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-config\") pod \"b52e9cb2-f628-4a79-a112-4c567318e8d3\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.211186 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-ovsdbserver-nb\") pod \"b52e9cb2-f628-4a79-a112-4c567318e8d3\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.211252 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hf8w\" (UniqueName: \"kubernetes.io/projected/b52e9cb2-f628-4a79-a112-4c567318e8d3-kube-api-access-6hf8w\") pod \"b52e9cb2-f628-4a79-a112-4c567318e8d3\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.211297 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-ovsdbserver-sb\") pod \"b52e9cb2-f628-4a79-a112-4c567318e8d3\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.211360 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-dns-svc\") pod \"b52e9cb2-f628-4a79-a112-4c567318e8d3\" (UID: \"b52e9cb2-f628-4a79-a112-4c567318e8d3\") " Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.211925 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ba20404-0531-48f1-894f-5903b3ff71ef-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.211945 4644 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5g22\" (UniqueName: \"kubernetes.io/projected/1db0e56b-985b-427f-9afa-d525e9de505d-kube-api-access-g5g22\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.211957 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz7j5\" (UniqueName: \"kubernetes.io/projected/3ba20404-0531-48f1-894f-5903b3ff71ef-kube-api-access-lz7j5\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.211966 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3cda71d-8d2a-4987-b267-e5cfcd8dd753-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.211976 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rp28d\" (UniqueName: \"kubernetes.io/projected/c3cda71d-8d2a-4987-b267-e5cfcd8dd753-kube-api-access-rp28d\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.211986 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/423735a8-e97e-4ce8-aecf-287e8fe08713-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.211994 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xztv4\" (UniqueName: \"kubernetes.io/projected/423735a8-e97e-4ce8-aecf-287e8fe08713-kube-api-access-xztv4\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.216410 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b52e9cb2-f628-4a79-a112-4c567318e8d3-kube-api-access-6hf8w" (OuterVolumeSpecName: "kube-api-access-6hf8w") pod "b52e9cb2-f628-4a79-a112-4c567318e8d3" (UID: "b52e9cb2-f628-4a79-a112-4c567318e8d3"). InnerVolumeSpecName "kube-api-access-6hf8w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.228043 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-cec9-account-create-update-nt5x6" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.228031 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-cec9-account-create-update-nt5x6" event={"ID":"c3cda71d-8d2a-4987-b267-e5cfcd8dd753","Type":"ContainerDied","Data":"93809076c93a2903f5c2ddf3c21118626a20f1d7d0c4dcf76e2b8088d6306488"} Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.228151 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93809076c93a2903f5c2ddf3c21118626a20f1d7d0c4dcf76e2b8088d6306488" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.231619 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1e41-account-create-update-4wmw8" event={"ID":"3ba20404-0531-48f1-894f-5903b3ff71ef","Type":"ContainerDied","Data":"210ba74e0ee602066321fe52dc38920e45cd6b34103e12a85475d1243150fda7"} Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.231644 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="210ba74e0ee602066321fe52dc38920e45cd6b34103e12a85475d1243150fda7" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.231683 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-1e41-account-create-update-4wmw8" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.234500 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-w4gfg" event={"ID":"1db0e56b-985b-427f-9afa-d525e9de505d","Type":"ContainerDied","Data":"9bb73a2233d428c5259852c002628e55419d51c5980478cbcc0a3f93a0787c8c"} Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.234539 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9bb73a2233d428c5259852c002628e55419d51c5980478cbcc0a3f93a0787c8c" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.234510 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-w4gfg" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.239246 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-twbxq" event={"ID":"423735a8-e97e-4ce8-aecf-287e8fe08713","Type":"ContainerDied","Data":"502345d279f021c063a638976d142016a0ebfdb523c6423ac60379c8b2eb22a0"} Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.239297 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="502345d279f021c063a638976d142016a0ebfdb523c6423ac60379c8b2eb22a0" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.239380 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-twbxq" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.245302 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-brm8s" event={"ID":"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8","Type":"ContainerStarted","Data":"d4b12782f57689df3cd61e9735567e6191173273e999caedaaac3513c6749256"} Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.248360 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rf9cs" event={"ID":"badba5c3-308b-457e-a988-f7f49a15e48a","Type":"ContainerDied","Data":"3f9b7bde8778a6b42238a5da5011889764e19577629fda7d6d5bdd3ed604400f"} Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.248488 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f9b7bde8778a6b42238a5da5011889764e19577629fda7d6d5bdd3ed604400f" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.248482 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b52e9cb2-f628-4a79-a112-4c567318e8d3" (UID: "b52e9cb2-f628-4a79-a112-4c567318e8d3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.249060 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rf9cs" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.251560 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-jtk58" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.251879 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-jtk58" event={"ID":"b52e9cb2-f628-4a79-a112-4c567318e8d3","Type":"ContainerDied","Data":"66fd34dd4d8a13957072d640dd724b3ebdcc55c10619c9902ac0c1e9dff8969d"} Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.252006 4644 scope.go:117] "RemoveContainer" containerID="d850d996273d40330b3a0f39a43b40aa30be120a0e88e23e5fa7a7ac24aac8ef" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.252680 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-config" (OuterVolumeSpecName: "config") pod "b52e9cb2-f628-4a79-a112-4c567318e8d3" (UID: "b52e9cb2-f628-4a79-a112-4c567318e8d3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.253217 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2dff-account-create-update-rlqft" event={"ID":"486a00a0-302e-4867-b7e4-9d935d9278ac","Type":"ContainerDied","Data":"ac33c23d4ff63a766f75f4c7226b971ad3b6457bfb58bc9dc038304581ccf5f5"} Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.253239 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac33c23d4ff63a766f75f4c7226b971ad3b6457bfb58bc9dc038304581ccf5f5" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.253259 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2dff-account-create-update-rlqft" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.260536 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b52e9cb2-f628-4a79-a112-4c567318e8d3" (UID: "b52e9cb2-f628-4a79-a112-4c567318e8d3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.278069 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b52e9cb2-f628-4a79-a112-4c567318e8d3" (UID: "b52e9cb2-f628-4a79-a112-4c567318e8d3"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.284611 4644 scope.go:117] "RemoveContainer" containerID="6e0d283977ffe4a42c805a7d8d2a6206ca691c76c1a172a21daffc3fd61a4f7b" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.314271 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.314301 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.314312 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hf8w\" (UniqueName: \"kubernetes.io/projected/b52e9cb2-f628-4a79-a112-4c567318e8d3-kube-api-access-6hf8w\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.314321 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.314330 4644 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b52e9cb2-f628-4a79-a112-4c567318e8d3-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.577056 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-brm8s" podStartSLOduration=2.115525973 podStartE2EDuration="6.577037914s" podCreationTimestamp="2025-12-13 07:01:26 +0000 UTC" firstStartedPulling="2025-12-13 07:01:27.316017183 +0000 UTC m=+949.530968016" lastFinishedPulling="2025-12-13 07:01:31.777529124 +0000 UTC m=+953.992479957" observedRunningTime="2025-12-13 07:01:32.264829313 +0000 UTC m=+954.479780146" watchObservedRunningTime="2025-12-13 07:01:32.577037914 +0000 UTC m=+954.791988748" Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.579194 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-jtk58"] Dec 13 07:01:32 crc kubenswrapper[4644]: I1213 07:01:32.586475 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-jtk58"] Dec 13 07:01:34 crc kubenswrapper[4644]: I1213 07:01:34.269526 4644 generic.go:334] "Generic (PLEG): container finished" podID="a92835b2-c7ad-4de9-bee6-ab7acbe20fb8" containerID="d4b12782f57689df3cd61e9735567e6191173273e999caedaaac3513c6749256" exitCode=0 Dec 13 07:01:34 crc kubenswrapper[4644]: I1213 07:01:34.269598 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-brm8s" event={"ID":"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8","Type":"ContainerDied","Data":"d4b12782f57689df3cd61e9735567e6191173273e999caedaaac3513c6749256"} Dec 13 07:01:34 crc kubenswrapper[4644]: I1213 07:01:34.400546 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b52e9cb2-f628-4a79-a112-4c567318e8d3" path="/var/lib/kubelet/pods/b52e9cb2-f628-4a79-a112-4c567318e8d3/volumes" Dec 13 07:01:35 crc kubenswrapper[4644]: I1213 07:01:35.545379 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-brm8s" Dec 13 07:01:35 crc kubenswrapper[4644]: I1213 07:01:35.671816 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r57hl\" (UniqueName: \"kubernetes.io/projected/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-kube-api-access-r57hl\") pod \"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8\" (UID: \"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8\") " Dec 13 07:01:35 crc kubenswrapper[4644]: I1213 07:01:35.671910 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-combined-ca-bundle\") pod \"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8\" (UID: \"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8\") " Dec 13 07:01:35 crc kubenswrapper[4644]: I1213 07:01:35.672039 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-config-data\") pod \"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8\" (UID: \"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8\") " Dec 13 07:01:35 crc kubenswrapper[4644]: I1213 07:01:35.676694 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-kube-api-access-r57hl" (OuterVolumeSpecName: "kube-api-access-r57hl") pod "a92835b2-c7ad-4de9-bee6-ab7acbe20fb8" (UID: "a92835b2-c7ad-4de9-bee6-ab7acbe20fb8"). InnerVolumeSpecName "kube-api-access-r57hl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:01:35 crc kubenswrapper[4644]: I1213 07:01:35.691085 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a92835b2-c7ad-4de9-bee6-ab7acbe20fb8" (UID: "a92835b2-c7ad-4de9-bee6-ab7acbe20fb8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:01:35 crc kubenswrapper[4644]: I1213 07:01:35.706318 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-config-data" (OuterVolumeSpecName: "config-data") pod "a92835b2-c7ad-4de9-bee6-ab7acbe20fb8" (UID: "a92835b2-c7ad-4de9-bee6-ab7acbe20fb8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:01:35 crc kubenswrapper[4644]: I1213 07:01:35.773594 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r57hl\" (UniqueName: \"kubernetes.io/projected/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-kube-api-access-r57hl\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:35 crc kubenswrapper[4644]: I1213 07:01:35.773624 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:35 crc kubenswrapper[4644]: I1213 07:01:35.773634 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.283410 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-brm8s" event={"ID":"a92835b2-c7ad-4de9-bee6-ab7acbe20fb8","Type":"ContainerDied","Data":"8a86629d3abb812163ada6b86bce5acfd4f134030f6dfb5df1747a86cbd2f273"} Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.283464 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a86629d3abb812163ada6b86bce5acfd4f134030f6dfb5df1747a86cbd2f273" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.283462 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-brm8s" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.488525 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-66dd8c6975-9229l"] Dec 13 07:01:36 crc kubenswrapper[4644]: E1213 07:01:36.488852 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3cda71d-8d2a-4987-b267-e5cfcd8dd753" containerName="mariadb-account-create-update" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.488869 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3cda71d-8d2a-4987-b267-e5cfcd8dd753" containerName="mariadb-account-create-update" Dec 13 07:01:36 crc kubenswrapper[4644]: E1213 07:01:36.488877 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="486a00a0-302e-4867-b7e4-9d935d9278ac" containerName="mariadb-account-create-update" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.488884 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="486a00a0-302e-4867-b7e4-9d935d9278ac" containerName="mariadb-account-create-update" Dec 13 07:01:36 crc kubenswrapper[4644]: E1213 07:01:36.488902 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b52e9cb2-f628-4a79-a112-4c567318e8d3" containerName="init" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.488908 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="b52e9cb2-f628-4a79-a112-4c567318e8d3" containerName="init" Dec 13 07:01:36 crc kubenswrapper[4644]: E1213 07:01:36.488915 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ba20404-0531-48f1-894f-5903b3ff71ef" containerName="mariadb-account-create-update" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.488920 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ba20404-0531-48f1-894f-5903b3ff71ef" containerName="mariadb-account-create-update" Dec 13 07:01:36 crc kubenswrapper[4644]: E1213 07:01:36.488929 4644 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="1db0e56b-985b-427f-9afa-d525e9de505d" containerName="mariadb-database-create" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.488934 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="1db0e56b-985b-427f-9afa-d525e9de505d" containerName="mariadb-database-create" Dec 13 07:01:36 crc kubenswrapper[4644]: E1213 07:01:36.488942 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="423735a8-e97e-4ce8-aecf-287e8fe08713" containerName="mariadb-database-create" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.488948 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="423735a8-e97e-4ce8-aecf-287e8fe08713" containerName="mariadb-database-create" Dec 13 07:01:36 crc kubenswrapper[4644]: E1213 07:01:36.488958 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="badba5c3-308b-457e-a988-f7f49a15e48a" containerName="mariadb-database-create" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.488963 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="badba5c3-308b-457e-a988-f7f49a15e48a" containerName="mariadb-database-create" Dec 13 07:01:36 crc kubenswrapper[4644]: E1213 07:01:36.488971 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b52e9cb2-f628-4a79-a112-4c567318e8d3" containerName="dnsmasq-dns" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.488976 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="b52e9cb2-f628-4a79-a112-4c567318e8d3" containerName="dnsmasq-dns" Dec 13 07:01:36 crc kubenswrapper[4644]: E1213 07:01:36.488986 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a92835b2-c7ad-4de9-bee6-ab7acbe20fb8" containerName="keystone-db-sync" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.488992 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="a92835b2-c7ad-4de9-bee6-ab7acbe20fb8" containerName="keystone-db-sync" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.489159 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3cda71d-8d2a-4987-b267-e5cfcd8dd753" containerName="mariadb-account-create-update" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.489170 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="1db0e56b-985b-427f-9afa-d525e9de505d" containerName="mariadb-database-create" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.489179 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="486a00a0-302e-4867-b7e4-9d935d9278ac" containerName="mariadb-account-create-update" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.489190 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="a92835b2-c7ad-4de9-bee6-ab7acbe20fb8" containerName="keystone-db-sync" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.489203 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="423735a8-e97e-4ce8-aecf-287e8fe08713" containerName="mariadb-database-create" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.489212 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="badba5c3-308b-457e-a988-f7f49a15e48a" containerName="mariadb-database-create" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.489219 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ba20404-0531-48f1-894f-5903b3ff71ef" containerName="mariadb-account-create-update" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.489227 4644 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="b52e9cb2-f628-4a79-a112-4c567318e8d3" containerName="dnsmasq-dns" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.490018 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.509517 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66dd8c6975-9229l"] Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.554894 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-j5z52"] Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.555927 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.557973 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.558284 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-5hlkk" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.558429 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.558626 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.558765 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.565639 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-j5z52"] Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.584684 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsws2\" (UniqueName: \"kubernetes.io/projected/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-kube-api-access-vsws2\") pod \"dnsmasq-dns-66dd8c6975-9229l\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.584804 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-ovsdbserver-sb\") pod \"dnsmasq-dns-66dd8c6975-9229l\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.584853 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-config\") pod \"dnsmasq-dns-66dd8c6975-9229l\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.584950 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-dns-svc\") pod \"dnsmasq-dns-66dd8c6975-9229l\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.585066 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-ovsdbserver-nb\") pod \"dnsmasq-dns-66dd8c6975-9229l\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.660506 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7fb68bc955-v6xbp"] Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.661755 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.665306 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-p6br7" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.665401 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.665607 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.668751 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.686006 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-ovsdbserver-nb\") pod \"dnsmasq-dns-66dd8c6975-9229l\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.686110 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsws2\" (UniqueName: \"kubernetes.io/projected/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-kube-api-access-vsws2\") pod \"dnsmasq-dns-66dd8c6975-9229l\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.686164 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-ovsdbserver-sb\") pod \"dnsmasq-dns-66dd8c6975-9229l\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.686199 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spxjr\" (UniqueName: \"kubernetes.io/projected/dad101f4-3870-4a48-88da-892e436608cf-kube-api-access-spxjr\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.686232 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-config\") pod \"dnsmasq-dns-66dd8c6975-9229l\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.686256 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-combined-ca-bundle\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " 
pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.686286 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-config-data\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.686318 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-scripts\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.686373 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-dns-svc\") pod \"dnsmasq-dns-66dd8c6975-9229l\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.686396 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-credential-keys\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.686418 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-fernet-keys\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.687165 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-ovsdbserver-sb\") pod \"dnsmasq-dns-66dd8c6975-9229l\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.687191 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-config\") pod \"dnsmasq-dns-66dd8c6975-9229l\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.687430 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-dns-svc\") pod \"dnsmasq-dns-66dd8c6975-9229l\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.687852 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-ovsdbserver-nb\") pod \"dnsmasq-dns-66dd8c6975-9229l\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.698459 4644 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7fb68bc955-v6xbp"] Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.714778 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsws2\" (UniqueName: \"kubernetes.io/projected/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-kube-api-access-vsws2\") pod \"dnsmasq-dns-66dd8c6975-9229l\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.766195 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-fdgxf"] Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.767493 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.771594 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.771651 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.771816 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-jrn9p" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.776093 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-fdgxf"] Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.783325 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-55f676769c-f5b8d"] Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.784579 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.792262 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0a6ff01-7d9f-4725-96e0-82d5750b9635-logs\") pod \"horizon-7fb68bc955-v6xbp\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.792302 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spxjr\" (UniqueName: \"kubernetes.io/projected/dad101f4-3870-4a48-88da-892e436608cf-kube-api-access-spxjr\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.792360 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-combined-ca-bundle\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.792407 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-config-data\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.792456 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-scripts\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.792533 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-credential-keys\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.792556 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-fernet-keys\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.792572 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0a6ff01-7d9f-4725-96e0-82d5750b9635-scripts\") pod \"horizon-7fb68bc955-v6xbp\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.792681 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0a6ff01-7d9f-4725-96e0-82d5750b9635-config-data\") pod \"horizon-7fb68bc955-v6xbp\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.792712 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwzjw\" (UniqueName: \"kubernetes.io/projected/c0a6ff01-7d9f-4725-96e0-82d5750b9635-kube-api-access-bwzjw\") pod \"horizon-7fb68bc955-v6xbp\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.792753 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0a6ff01-7d9f-4725-96e0-82d5750b9635-horizon-secret-key\") pod \"horizon-7fb68bc955-v6xbp\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.803862 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-combined-ca-bundle\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.806222 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.806634 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-config-data\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.810854 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-fernet-keys\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.811114 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-scripts\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.813180 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spxjr\" (UniqueName: \"kubernetes.io/projected/dad101f4-3870-4a48-88da-892e436608cf-kube-api-access-spxjr\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.813501 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-credential-keys\") pod \"keystone-bootstrap-j5z52\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.817556 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.819415 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.834105 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.834414 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.849531 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-55f676769c-f5b8d"] Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.867159 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.891090 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.906104 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gg5pv\" (UniqueName: \"kubernetes.io/projected/bf873768-3def-4a7c-b48b-fb3749f8c927-kube-api-access-gg5pv\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.906186 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0a6ff01-7d9f-4725-96e0-82d5750b9635-scripts\") pod \"horizon-7fb68bc955-v6xbp\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.906221 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-config-data\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.906292 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-combined-ca-bundle\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.906327 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fa16de04-2c8d-4421-8e23-c6dd519bf22d-config-data\") pod \"horizon-55f676769c-f5b8d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.906372 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf873768-3def-4a7c-b48b-fb3749f8c927-etc-machine-id\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.906402 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa16de04-2c8d-4421-8e23-c6dd519bf22d-scripts\") pod \"horizon-55f676769c-f5b8d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.909239 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0a6ff01-7d9f-4725-96e0-82d5750b9635-scripts\") pod \"horizon-7fb68bc955-v6xbp\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.933911 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0a6ff01-7d9f-4725-96e0-82d5750b9635-config-data\") pod \"horizon-7fb68bc955-v6xbp\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:01:36 crc 
kubenswrapper[4644]: I1213 07:01:36.934015 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwzjw\" (UniqueName: \"kubernetes.io/projected/c0a6ff01-7d9f-4725-96e0-82d5750b9635-kube-api-access-bwzjw\") pod \"horizon-7fb68bc955-v6xbp\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.934056 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0a6ff01-7d9f-4725-96e0-82d5750b9635-horizon-secret-key\") pod \"horizon-7fb68bc955-v6xbp\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.934148 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fa16de04-2c8d-4421-8e23-c6dd519bf22d-horizon-secret-key\") pod \"horizon-55f676769c-f5b8d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.934226 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgslq\" (UniqueName: \"kubernetes.io/projected/fa16de04-2c8d-4421-8e23-c6dd519bf22d-kube-api-access-tgslq\") pod \"horizon-55f676769c-f5b8d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.934284 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0a6ff01-7d9f-4725-96e0-82d5750b9635-logs\") pod \"horizon-7fb68bc955-v6xbp\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.934379 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-scripts\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.934430 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-db-sync-config-data\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.934495 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa16de04-2c8d-4421-8e23-c6dd519bf22d-logs\") pod \"horizon-55f676769c-f5b8d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.935737 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0a6ff01-7d9f-4725-96e0-82d5750b9635-config-data\") pod \"horizon-7fb68bc955-v6xbp\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.936823 4644 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0a6ff01-7d9f-4725-96e0-82d5750b9635-logs\") pod \"horizon-7fb68bc955-v6xbp\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:01:36 crc kubenswrapper[4644]: I1213 07:01:36.982945 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0a6ff01-7d9f-4725-96e0-82d5750b9635-horizon-secret-key\") pod \"horizon-7fb68bc955-v6xbp\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.014763 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwzjw\" (UniqueName: \"kubernetes.io/projected/c0a6ff01-7d9f-4725-96e0-82d5750b9635-kube-api-access-bwzjw\") pod \"horizon-7fb68bc955-v6xbp\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.044541 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-config-data\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.044664 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-combined-ca-bundle\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.044712 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fa16de04-2c8d-4421-8e23-c6dd519bf22d-config-data\") pod \"horizon-55f676769c-f5b8d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.044767 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf873768-3def-4a7c-b48b-fb3749f8c927-etc-machine-id\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.044798 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa16de04-2c8d-4421-8e23-c6dd519bf22d-scripts\") pod \"horizon-55f676769c-f5b8d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.044842 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c601fe2-d854-42ef-885a-e2acc45f1607-run-httpd\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.044882 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-scripts\") pod 
\"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.044905 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sphwp\" (UniqueName: \"kubernetes.io/projected/7c601fe2-d854-42ef-885a-e2acc45f1607-kube-api-access-sphwp\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.044975 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fa16de04-2c8d-4421-8e23-c6dd519bf22d-horizon-secret-key\") pod \"horizon-55f676769c-f5b8d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.045024 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgslq\" (UniqueName: \"kubernetes.io/projected/fa16de04-2c8d-4421-8e23-c6dd519bf22d-kube-api-access-tgslq\") pod \"horizon-55f676769c-f5b8d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.045055 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c601fe2-d854-42ef-885a-e2acc45f1607-log-httpd\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.045074 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.045130 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.045173 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-scripts\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.045196 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-db-sync-config-data\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.045233 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa16de04-2c8d-4421-8e23-c6dd519bf22d-logs\") pod \"horizon-55f676769c-f5b8d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.045288 
4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gg5pv\" (UniqueName: \"kubernetes.io/projected/bf873768-3def-4a7c-b48b-fb3749f8c927-kube-api-access-gg5pv\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.045320 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-config-data\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.048422 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf873768-3def-4a7c-b48b-fb3749f8c927-etc-machine-id\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.053641 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa16de04-2c8d-4421-8e23-c6dd519bf22d-scripts\") pod \"horizon-55f676769c-f5b8d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.059023 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-542cr"] Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.060962 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-542cr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.067000 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fa16de04-2c8d-4421-8e23-c6dd519bf22d-config-data\") pod \"horizon-55f676769c-f5b8d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.069349 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-scripts\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.073562 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-db-sync-config-data\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.074091 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66dd8c6975-9229l"] Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.074245 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa16de04-2c8d-4421-8e23-c6dd519bf22d-logs\") pod \"horizon-55f676769c-f5b8d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.074539 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 
07:01:37.074644 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-qj6rp" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.075250 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.078890 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fa16de04-2c8d-4421-8e23-c6dd519bf22d-horizon-secret-key\") pod \"horizon-55f676769c-f5b8d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.083145 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-config-data\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.087300 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-combined-ca-bundle\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.093185 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgslq\" (UniqueName: \"kubernetes.io/projected/fa16de04-2c8d-4421-8e23-c6dd519bf22d-kube-api-access-tgslq\") pod \"horizon-55f676769c-f5b8d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.097748 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-bk2fx"] Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.098778 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-bk2fx" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.103563 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.103710 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.103899 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-nfkkq" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.106015 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gg5pv\" (UniqueName: \"kubernetes.io/projected/bf873768-3def-4a7c-b48b-fb3749f8c927-kube-api-access-gg5pv\") pod \"cinder-db-sync-fdgxf\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.106828 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-bk2fx"] Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.120346 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-542cr"] Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.126278 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-dwfrr"] Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.128660 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-dwfrr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.131877 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.132050 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-5td4f" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.135888 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-694dbb6647-8bdf9"] Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.137699 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.141611 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-dwfrr"] Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.147842 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk4qf\" (UniqueName: \"kubernetes.io/projected/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-kube-api-access-lk4qf\") pod \"neutron-db-sync-542cr\" (UID: \"ae5ec083-82fc-4a1b-826f-50536ee5fcd0\") " pod="openstack/neutron-db-sync-542cr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.147931 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-config-data\") pod \"placement-db-sync-bk2fx\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " pod="openstack/placement-db-sync-bk2fx" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.148035 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-combined-ca-bundle\") pod \"neutron-db-sync-542cr\" (UID: \"ae5ec083-82fc-4a1b-826f-50536ee5fcd0\") " pod="openstack/neutron-db-sync-542cr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.148093 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-config-data\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.148124 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-combined-ca-bundle\") pod \"placement-db-sync-bk2fx\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " pod="openstack/placement-db-sync-bk2fx" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.148196 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-scripts\") pod \"placement-db-sync-bk2fx\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " pod="openstack/placement-db-sync-bk2fx" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.148244 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c601fe2-d854-42ef-885a-e2acc45f1607-run-httpd\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.148278 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-scripts\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.148293 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sphwp\" (UniqueName: \"kubernetes.io/projected/7c601fe2-d854-42ef-885a-e2acc45f1607-kube-api-access-sphwp\") pod \"ceilometer-0\" (UID: 
\"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.148349 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-logs\") pod \"placement-db-sync-bk2fx\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " pod="openstack/placement-db-sync-bk2fx" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.148365 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f264a4d-92e3-4a69-a07f-00a3f0802484-combined-ca-bundle\") pod \"barbican-db-sync-dwfrr\" (UID: \"2f264a4d-92e3-4a69-a07f-00a3f0802484\") " pod="openstack/barbican-db-sync-dwfrr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.148434 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2f264a4d-92e3-4a69-a07f-00a3f0802484-db-sync-config-data\") pod \"barbican-db-sync-dwfrr\" (UID: \"2f264a4d-92e3-4a69-a07f-00a3f0802484\") " pod="openstack/barbican-db-sync-dwfrr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.148487 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c601fe2-d854-42ef-885a-e2acc45f1607-log-httpd\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.148504 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggszd\" (UniqueName: \"kubernetes.io/projected/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-kube-api-access-ggszd\") pod \"placement-db-sync-bk2fx\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " pod="openstack/placement-db-sync-bk2fx" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.148541 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.148591 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.148641 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcl79\" (UniqueName: \"kubernetes.io/projected/2f264a4d-92e3-4a69-a07f-00a3f0802484-kube-api-access-xcl79\") pod \"barbican-db-sync-dwfrr\" (UID: \"2f264a4d-92e3-4a69-a07f-00a3f0802484\") " pod="openstack/barbican-db-sync-dwfrr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.148672 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-config\") pod \"neutron-db-sync-542cr\" (UID: \"ae5ec083-82fc-4a1b-826f-50536ee5fcd0\") " pod="openstack/neutron-db-sync-542cr" Dec 13 07:01:37 crc 
kubenswrapper[4644]: I1213 07:01:37.151865 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-694dbb6647-8bdf9"] Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.154582 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.156753 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c601fe2-d854-42ef-885a-e2acc45f1607-run-httpd\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.156855 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.157644 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-config-data\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.158947 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c601fe2-d854-42ef-885a-e2acc45f1607-log-httpd\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.170633 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-scripts\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.176231 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sphwp\" (UniqueName: \"kubernetes.io/projected/7c601fe2-d854-42ef-885a-e2acc45f1607-kube-api-access-sphwp\") pod \"ceilometer-0\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.249790 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk4qf\" (UniqueName: \"kubernetes.io/projected/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-kube-api-access-lk4qf\") pod \"neutron-db-sync-542cr\" (UID: \"ae5ec083-82fc-4a1b-826f-50536ee5fcd0\") " pod="openstack/neutron-db-sync-542cr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.249955 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-config-data\") pod \"placement-db-sync-bk2fx\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " pod="openstack/placement-db-sync-bk2fx" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.250056 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf9m4\" (UniqueName: 
\"kubernetes.io/projected/d1a8f072-5116-43bc-9280-288e5eae7827-kube-api-access-wf9m4\") pod \"dnsmasq-dns-694dbb6647-8bdf9\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.250232 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-combined-ca-bundle\") pod \"neutron-db-sync-542cr\" (UID: \"ae5ec083-82fc-4a1b-826f-50536ee5fcd0\") " pod="openstack/neutron-db-sync-542cr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.250291 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-ovsdbserver-sb\") pod \"dnsmasq-dns-694dbb6647-8bdf9\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.250341 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-combined-ca-bundle\") pod \"placement-db-sync-bk2fx\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " pod="openstack/placement-db-sync-bk2fx" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.250400 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-scripts\") pod \"placement-db-sync-bk2fx\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " pod="openstack/placement-db-sync-bk2fx" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.250461 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-dns-svc\") pod \"dnsmasq-dns-694dbb6647-8bdf9\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.250530 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-logs\") pod \"placement-db-sync-bk2fx\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " pod="openstack/placement-db-sync-bk2fx" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.250552 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f264a4d-92e3-4a69-a07f-00a3f0802484-combined-ca-bundle\") pod \"barbican-db-sync-dwfrr\" (UID: \"2f264a4d-92e3-4a69-a07f-00a3f0802484\") " pod="openstack/barbican-db-sync-dwfrr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.250628 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2f264a4d-92e3-4a69-a07f-00a3f0802484-db-sync-config-data\") pod \"barbican-db-sync-dwfrr\" (UID: \"2f264a4d-92e3-4a69-a07f-00a3f0802484\") " pod="openstack/barbican-db-sync-dwfrr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.250659 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggszd\" (UniqueName: \"kubernetes.io/projected/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-kube-api-access-ggszd\") pod 
\"placement-db-sync-bk2fx\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " pod="openstack/placement-db-sync-bk2fx" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.250685 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-ovsdbserver-nb\") pod \"dnsmasq-dns-694dbb6647-8bdf9\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.250758 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-config\") pod \"dnsmasq-dns-694dbb6647-8bdf9\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.250788 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcl79\" (UniqueName: \"kubernetes.io/projected/2f264a4d-92e3-4a69-a07f-00a3f0802484-kube-api-access-xcl79\") pod \"barbican-db-sync-dwfrr\" (UID: \"2f264a4d-92e3-4a69-a07f-00a3f0802484\") " pod="openstack/barbican-db-sync-dwfrr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.250822 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-config\") pod \"neutron-db-sync-542cr\" (UID: \"ae5ec083-82fc-4a1b-826f-50536ee5fcd0\") " pod="openstack/neutron-db-sync-542cr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.252901 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-combined-ca-bundle\") pod \"neutron-db-sync-542cr\" (UID: \"ae5ec083-82fc-4a1b-826f-50536ee5fcd0\") " pod="openstack/neutron-db-sync-542cr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.252977 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-config-data\") pod \"placement-db-sync-bk2fx\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " pod="openstack/placement-db-sync-bk2fx" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.253821 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-logs\") pod \"placement-db-sync-bk2fx\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " pod="openstack/placement-db-sync-bk2fx" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.254167 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-combined-ca-bundle\") pod \"placement-db-sync-bk2fx\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " pod="openstack/placement-db-sync-bk2fx" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.255084 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2f264a4d-92e3-4a69-a07f-00a3f0802484-db-sync-config-data\") pod \"barbican-db-sync-dwfrr\" (UID: \"2f264a4d-92e3-4a69-a07f-00a3f0802484\") " pod="openstack/barbican-db-sync-dwfrr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 
07:01:37.255294 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-scripts\") pod \"placement-db-sync-bk2fx\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " pod="openstack/placement-db-sync-bk2fx" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.255790 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f264a4d-92e3-4a69-a07f-00a3f0802484-combined-ca-bundle\") pod \"barbican-db-sync-dwfrr\" (UID: \"2f264a4d-92e3-4a69-a07f-00a3f0802484\") " pod="openstack/barbican-db-sync-dwfrr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.258690 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.259604 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-config\") pod \"neutron-db-sync-542cr\" (UID: \"ae5ec083-82fc-4a1b-826f-50536ee5fcd0\") " pod="openstack/neutron-db-sync-542cr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.267996 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk4qf\" (UniqueName: \"kubernetes.io/projected/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-kube-api-access-lk4qf\") pod \"neutron-db-sync-542cr\" (UID: \"ae5ec083-82fc-4a1b-826f-50536ee5fcd0\") " pod="openstack/neutron-db-sync-542cr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.281894 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.284256 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggszd\" (UniqueName: \"kubernetes.io/projected/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-kube-api-access-ggszd\") pod \"placement-db-sync-bk2fx\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " pod="openstack/placement-db-sync-bk2fx" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.284820 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcl79\" (UniqueName: \"kubernetes.io/projected/2f264a4d-92e3-4a69-a07f-00a3f0802484-kube-api-access-xcl79\") pod \"barbican-db-sync-dwfrr\" (UID: \"2f264a4d-92e3-4a69-a07f-00a3f0802484\") " pod="openstack/barbican-db-sync-dwfrr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.285337 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.360759 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-ovsdbserver-nb\") pod \"dnsmasq-dns-694dbb6647-8bdf9\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.360837 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-config\") pod \"dnsmasq-dns-694dbb6647-8bdf9\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.360933 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf9m4\" (UniqueName: \"kubernetes.io/projected/d1a8f072-5116-43bc-9280-288e5eae7827-kube-api-access-wf9m4\") pod \"dnsmasq-dns-694dbb6647-8bdf9\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.360986 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-ovsdbserver-sb\") pod \"dnsmasq-dns-694dbb6647-8bdf9\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.361058 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-dns-svc\") pod \"dnsmasq-dns-694dbb6647-8bdf9\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.361900 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-dns-svc\") pod \"dnsmasq-dns-694dbb6647-8bdf9\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.362399 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-ovsdbserver-nb\") pod \"dnsmasq-dns-694dbb6647-8bdf9\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.364012 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-config\") pod \"dnsmasq-dns-694dbb6647-8bdf9\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.364704 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-ovsdbserver-sb\") pod \"dnsmasq-dns-694dbb6647-8bdf9\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.381147 4644 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf9m4\" (UniqueName: \"kubernetes.io/projected/d1a8f072-5116-43bc-9280-288e5eae7827-kube-api-access-wf9m4\") pod \"dnsmasq-dns-694dbb6647-8bdf9\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.404167 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.455320 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-542cr" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.474491 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-bk2fx" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.487606 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66dd8c6975-9229l"] Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.488386 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-dwfrr" Dec 13 07:01:37 crc kubenswrapper[4644]: W1213 07:01:37.489052 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d5ae84f_64d4_422a_8d2c_d6d37d6ea775.slice/crio-a540b87390fa2d24f3420fdffeda9eaccc8940acafd04cbada770a37bbc9aa8a WatchSource:0}: Error finding container a540b87390fa2d24f3420fdffeda9eaccc8940acafd04cbada770a37bbc9aa8a: Status 404 returned error can't find the container with id a540b87390fa2d24f3420fdffeda9eaccc8940acafd04cbada770a37bbc9aa8a Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.505413 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.565002 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-j5z52"] Dec 13 07:01:37 crc kubenswrapper[4644]: I1213 07:01:37.738795 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-55f676769c-f5b8d"] Dec 13 07:01:37 crc kubenswrapper[4644]: W1213 07:01:37.803710 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa16de04_2c8d_4421_8e23_c6dd519bf22d.slice/crio-e6a6882d51222f49c9ef30ccc036cdf5c16a146d112c00c80ac37fa70818cff9 WatchSource:0}: Error finding container e6a6882d51222f49c9ef30ccc036cdf5c16a146d112c00c80ac37fa70818cff9: Status 404 returned error can't find the container with id e6a6882d51222f49c9ef30ccc036cdf5c16a146d112c00c80ac37fa70818cff9 Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.054965 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.110650 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7fb68bc955-v6xbp"] Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.214400 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-fdgxf"] Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.263067 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-bk2fx"] Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.270567 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-542cr"] Dec 13 07:01:38 crc kubenswrapper[4644]: W1213 07:01:38.271100 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae5ec083_82fc_4a1b_826f_50536ee5fcd0.slice/crio-2a3d4a6e4e339ee29ef44dfff23f9a525540b0a5791149460c4197a7fc5edf67 WatchSource:0}: Error finding container 2a3d4a6e4e339ee29ef44dfff23f9a525540b0a5791149460c4197a7fc5edf67: Status 404 returned error can't find the container with id 2a3d4a6e4e339ee29ef44dfff23f9a525540b0a5791149460c4197a7fc5edf67 Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.310859 4644 generic.go:334] "Generic (PLEG): container finished" podID="3d5ae84f-64d4-422a-8d2c-d6d37d6ea775" containerID="72dbebd61e313d89ca774a8fb6cc5964e9a37a0414fa1a432596d9511fac7481" exitCode=0 Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.310953 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66dd8c6975-9229l" event={"ID":"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775","Type":"ContainerDied","Data":"72dbebd61e313d89ca774a8fb6cc5964e9a37a0414fa1a432596d9511fac7481"} Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.310983 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66dd8c6975-9229l" event={"ID":"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775","Type":"ContainerStarted","Data":"a540b87390fa2d24f3420fdffeda9eaccc8940acafd04cbada770a37bbc9aa8a"} Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.313010 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7fb68bc955-v6xbp" event={"ID":"c0a6ff01-7d9f-4725-96e0-82d5750b9635","Type":"ContainerStarted","Data":"17d66e29718284421bddf282b763edbd191da0c16dc990617659e899b478dac2"} Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.316359 4644 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/cinder-db-sync-fdgxf" event={"ID":"bf873768-3def-4a7c-b48b-fb3749f8c927","Type":"ContainerStarted","Data":"80761be2ae57769e5a9b5f1a83eb0f1a4b26a3a276e8fdd3d1e85bead962b705"} Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.317700 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-542cr" event={"ID":"ae5ec083-82fc-4a1b-826f-50536ee5fcd0","Type":"ContainerStarted","Data":"2a3d4a6e4e339ee29ef44dfff23f9a525540b0a5791149460c4197a7fc5edf67"} Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.318967 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55f676769c-f5b8d" event={"ID":"fa16de04-2c8d-4421-8e23-c6dd519bf22d","Type":"ContainerStarted","Data":"e6a6882d51222f49c9ef30ccc036cdf5c16a146d112c00c80ac37fa70818cff9"} Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.320226 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-j5z52" event={"ID":"dad101f4-3870-4a48-88da-892e436608cf","Type":"ContainerStarted","Data":"14d0d1d27761cb9349c57dbc6743ac185316082442fe023a224d9d2347d9978f"} Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.320262 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-j5z52" event={"ID":"dad101f4-3870-4a48-88da-892e436608cf","Type":"ContainerStarted","Data":"5b8efd0509cfadb63a711378c3c10fba48980521681bc1460d8a731b1d96862b"} Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.323744 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c601fe2-d854-42ef-885a-e2acc45f1607","Type":"ContainerStarted","Data":"af330df6fcc1b5120451e54749f718a1425d675b8e4e46fe73d601f3490a05b1"} Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.329107 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bk2fx" event={"ID":"2d3803d2-f0df-48c8-ba36-8a1cffdc262e","Type":"ContainerStarted","Data":"161d77c47902e56c758c087c45ff72afbc5ed6042ccb504a446028b38cefb20a"} Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.345142 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-j5z52" podStartSLOduration=2.3451263239999998 podStartE2EDuration="2.345126324s" podCreationTimestamp="2025-12-13 07:01:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:01:38.343049158 +0000 UTC m=+960.557999991" watchObservedRunningTime="2025-12-13 07:01:38.345126324 +0000 UTC m=+960.560077156" Dec 13 07:01:38 crc kubenswrapper[4644]: E1213 07:01:38.414688 4644 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/a2d93989bf7c35b5bfc3748f72341eeda3ab04708eb78a02e526e11b07a13b9a/diff" to get inode usage: stat /var/lib/containers/storage/overlay/a2d93989bf7c35b5bfc3748f72341eeda3ab04708eb78a02e526e11b07a13b9a/diff: no such file or directory, extraDiskErr: Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.440320 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-dwfrr"] Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.440350 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-694dbb6647-8bdf9"] Dec 13 07:01:38 crc kubenswrapper[4644]: E1213 07:01:38.663546 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47f5ebb6_8937_441f_9c9f_a1e31c401eed.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47f5ebb6_8937_441f_9c9f_a1e31c401eed.slice/crio-b8fc48432f72b692739947981613bbb69944b32e6c437e9a01d15154f417b1b5\": RecentStats: unable to find data in memory cache]" Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.673075 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.802342 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-ovsdbserver-sb\") pod \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.802407 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsws2\" (UniqueName: \"kubernetes.io/projected/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-kube-api-access-vsws2\") pod \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.802474 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-ovsdbserver-nb\") pod \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.802496 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-config\") pod \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.802592 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-dns-svc\") pod \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\" (UID: \"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775\") " Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.815892 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-kube-api-access-vsws2" (OuterVolumeSpecName: "kube-api-access-vsws2") pod "3d5ae84f-64d4-422a-8d2c-d6d37d6ea775" (UID: "3d5ae84f-64d4-422a-8d2c-d6d37d6ea775"). InnerVolumeSpecName "kube-api-access-vsws2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.825287 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3d5ae84f-64d4-422a-8d2c-d6d37d6ea775" (UID: "3d5ae84f-64d4-422a-8d2c-d6d37d6ea775"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.833123 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3d5ae84f-64d4-422a-8d2c-d6d37d6ea775" (UID: "3d5ae84f-64d4-422a-8d2c-d6d37d6ea775"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.842456 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-config" (OuterVolumeSpecName: "config") pod "3d5ae84f-64d4-422a-8d2c-d6d37d6ea775" (UID: "3d5ae84f-64d4-422a-8d2c-d6d37d6ea775"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.842650 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3d5ae84f-64d4-422a-8d2c-d6d37d6ea775" (UID: "3d5ae84f-64d4-422a-8d2c-d6d37d6ea775"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.904997 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsws2\" (UniqueName: \"kubernetes.io/projected/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-kube-api-access-vsws2\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.905030 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.905040 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.905049 4644 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:38 crc kubenswrapper[4644]: I1213 07:01:38.905057 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.342608 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-542cr" event={"ID":"ae5ec083-82fc-4a1b-826f-50536ee5fcd0","Type":"ContainerStarted","Data":"83ad15e74ba901753f87023a8a502c26d06b4f90b1a970597075c9488660170e"} Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.345170 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" event={"ID":"d1a8f072-5116-43bc-9280-288e5eae7827","Type":"ContainerDied","Data":"4a8cef6bdbe085f7bd133b5fdda36436f49c4b0bde5b61b7819d09072a8b4013"} Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.345338 4644 generic.go:334] "Generic (PLEG): container finished" podID="d1a8f072-5116-43bc-9280-288e5eae7827" containerID="4a8cef6bdbe085f7bd133b5fdda36436f49c4b0bde5b61b7819d09072a8b4013" 
exitCode=0 Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.345596 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" event={"ID":"d1a8f072-5116-43bc-9280-288e5eae7827","Type":"ContainerStarted","Data":"b5a619ca63388210ee9f085a035fc585cd4dd4ebb4a82f7bc67ca106b750cfe5"} Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.352872 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-dwfrr" event={"ID":"2f264a4d-92e3-4a69-a07f-00a3f0802484","Type":"ContainerStarted","Data":"2027dfc1de6129f87011101b2690ed9b2ce4a9d966cc7ad4191cae28d3984feb"} Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.359675 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66dd8c6975-9229l" Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.359663 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66dd8c6975-9229l" event={"ID":"3d5ae84f-64d4-422a-8d2c-d6d37d6ea775","Type":"ContainerDied","Data":"a540b87390fa2d24f3420fdffeda9eaccc8940acafd04cbada770a37bbc9aa8a"} Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.359916 4644 scope.go:117] "RemoveContainer" containerID="72dbebd61e313d89ca774a8fb6cc5964e9a37a0414fa1a432596d9511fac7481" Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.367615 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-542cr" podStartSLOduration=3.367589496 podStartE2EDuration="3.367589496s" podCreationTimestamp="2025-12-13 07:01:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:01:39.357151649 +0000 UTC m=+961.572102482" watchObservedRunningTime="2025-12-13 07:01:39.367589496 +0000 UTC m=+961.582540328" Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.510148 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66dd8c6975-9229l"] Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.535868 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-66dd8c6975-9229l"] Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.875438 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-55f676769c-f5b8d"] Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.911715 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-587d9954b7-hbjrt"] Dec 13 07:01:39 crc kubenswrapper[4644]: E1213 07:01:39.912058 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d5ae84f-64d4-422a-8d2c-d6d37d6ea775" containerName="init" Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.912073 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d5ae84f-64d4-422a-8d2c-d6d37d6ea775" containerName="init" Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.912254 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d5ae84f-64d4-422a-8d2c-d6d37d6ea775" containerName="init" Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.913052 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-587d9954b7-hbjrt" Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.929750 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-587d9954b7-hbjrt"] Dec 13 07:01:39 crc kubenswrapper[4644]: I1213 07:01:39.979496 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.047466 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vmpx\" (UniqueName: \"kubernetes.io/projected/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-kube-api-access-7vmpx\") pod \"horizon-587d9954b7-hbjrt\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " pod="openstack/horizon-587d9954b7-hbjrt" Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.047555 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-horizon-secret-key\") pod \"horizon-587d9954b7-hbjrt\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " pod="openstack/horizon-587d9954b7-hbjrt" Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.047597 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-config-data\") pod \"horizon-587d9954b7-hbjrt\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " pod="openstack/horizon-587d9954b7-hbjrt" Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.047803 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-scripts\") pod \"horizon-587d9954b7-hbjrt\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " pod="openstack/horizon-587d9954b7-hbjrt" Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.047930 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-logs\") pod \"horizon-587d9954b7-hbjrt\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " pod="openstack/horizon-587d9954b7-hbjrt" Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.149545 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vmpx\" (UniqueName: \"kubernetes.io/projected/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-kube-api-access-7vmpx\") pod \"horizon-587d9954b7-hbjrt\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " pod="openstack/horizon-587d9954b7-hbjrt" Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.149608 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-horizon-secret-key\") pod \"horizon-587d9954b7-hbjrt\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " pod="openstack/horizon-587d9954b7-hbjrt" Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.149638 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-config-data\") pod \"horizon-587d9954b7-hbjrt\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " pod="openstack/horizon-587d9954b7-hbjrt" Dec 13 07:01:40 crc kubenswrapper[4644]: 
I1213 07:01:40.149782 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-scripts\") pod \"horizon-587d9954b7-hbjrt\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " pod="openstack/horizon-587d9954b7-hbjrt"
Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.149894 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-logs\") pod \"horizon-587d9954b7-hbjrt\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " pod="openstack/horizon-587d9954b7-hbjrt"
Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.150282 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-logs\") pod \"horizon-587d9954b7-hbjrt\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " pod="openstack/horizon-587d9954b7-hbjrt"
Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.151628 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-config-data\") pod \"horizon-587d9954b7-hbjrt\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " pod="openstack/horizon-587d9954b7-hbjrt"
Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.153108 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-scripts\") pod \"horizon-587d9954b7-hbjrt\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " pod="openstack/horizon-587d9954b7-hbjrt"
Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.155157 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-horizon-secret-key\") pod \"horizon-587d9954b7-hbjrt\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " pod="openstack/horizon-587d9954b7-hbjrt"
Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.171791 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vmpx\" (UniqueName: \"kubernetes.io/projected/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-kube-api-access-7vmpx\") pod \"horizon-587d9954b7-hbjrt\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " pod="openstack/horizon-587d9954b7-hbjrt"
Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.240243 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-587d9954b7-hbjrt"
Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.383690 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" event={"ID":"d1a8f072-5116-43bc-9280-288e5eae7827","Type":"ContainerStarted","Data":"e416575821fc7ad7bddf27fe7c2d22cb3be5ea26a8834e1f70c9aa612967efeb"}
Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.404740 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" podStartSLOduration=4.40470626 podStartE2EDuration="4.40470626s" podCreationTimestamp="2025-12-13 07:01:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:01:40.401640044 +0000 UTC m=+962.616590878" watchObservedRunningTime="2025-12-13 07:01:40.40470626 +0000 UTC m=+962.619657092"
Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.405153 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d5ae84f-64d4-422a-8d2c-d6d37d6ea775" path="/var/lib/kubelet/pods/3d5ae84f-64d4-422a-8d2c-d6d37d6ea775/volumes"
Dec 13 07:01:40 crc kubenswrapper[4644]: I1213 07:01:40.710973 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-587d9954b7-hbjrt"]
Dec 13 07:01:41 crc kubenswrapper[4644]: I1213 07:01:41.397467 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-587d9954b7-hbjrt" event={"ID":"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04","Type":"ContainerStarted","Data":"52c5f84fa3f80beafe2b9406ea7fa0586a9eb037d20726552ec86600a4364510"}
Dec 13 07:01:41 crc kubenswrapper[4644]: I1213 07:01:41.400026 4644 generic.go:334] "Generic (PLEG): container finished" podID="dad101f4-3870-4a48-88da-892e436608cf" containerID="14d0d1d27761cb9349c57dbc6743ac185316082442fe023a224d9d2347d9978f" exitCode=0
Dec 13 07:01:41 crc kubenswrapper[4644]: I1213 07:01:41.400544 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-j5z52" event={"ID":"dad101f4-3870-4a48-88da-892e436608cf","Type":"ContainerDied","Data":"14d0d1d27761cb9349c57dbc6743ac185316082442fe023a224d9d2347d9978f"}
Dec 13 07:01:41 crc kubenswrapper[4644]: I1213 07:01:41.400757 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-694dbb6647-8bdf9"
Dec 13 07:01:44 crc kubenswrapper[4644]: I1213 07:01:44.115028 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-skqjj"]
Dec 13 07:01:44 crc kubenswrapper[4644]: I1213 07:01:44.118332 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-skqjj"
Dec 13 07:01:44 crc kubenswrapper[4644]: I1213 07:01:44.135237 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-skqjj"]
Dec 13 07:01:44 crc kubenswrapper[4644]: I1213 07:01:44.242408 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z66nv\" (UniqueName: \"kubernetes.io/projected/c3526efa-a3f6-4ec1-8273-e302279281ba-kube-api-access-z66nv\") pod \"certified-operators-skqjj\" (UID: \"c3526efa-a3f6-4ec1-8273-e302279281ba\") " pod="openshift-marketplace/certified-operators-skqjj"
Dec 13 07:01:44 crc kubenswrapper[4644]: I1213 07:01:44.242663 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3526efa-a3f6-4ec1-8273-e302279281ba-utilities\") pod \"certified-operators-skqjj\" (UID: \"c3526efa-a3f6-4ec1-8273-e302279281ba\") " pod="openshift-marketplace/certified-operators-skqjj"
Dec 13 07:01:44 crc kubenswrapper[4644]: I1213 07:01:44.242740 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3526efa-a3f6-4ec1-8273-e302279281ba-catalog-content\") pod \"certified-operators-skqjj\" (UID: \"c3526efa-a3f6-4ec1-8273-e302279281ba\") " pod="openshift-marketplace/certified-operators-skqjj"
Dec 13 07:01:44 crc kubenswrapper[4644]: I1213 07:01:44.344349 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3526efa-a3f6-4ec1-8273-e302279281ba-utilities\") pod \"certified-operators-skqjj\" (UID: \"c3526efa-a3f6-4ec1-8273-e302279281ba\") " pod="openshift-marketplace/certified-operators-skqjj"
Dec 13 07:01:44 crc kubenswrapper[4644]: I1213 07:01:44.344459 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3526efa-a3f6-4ec1-8273-e302279281ba-catalog-content\") pod \"certified-operators-skqjj\" (UID: \"c3526efa-a3f6-4ec1-8273-e302279281ba\") " pod="openshift-marketplace/certified-operators-skqjj"
Dec 13 07:01:44 crc kubenswrapper[4644]: I1213 07:01:44.344513 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z66nv\" (UniqueName: \"kubernetes.io/projected/c3526efa-a3f6-4ec1-8273-e302279281ba-kube-api-access-z66nv\") pod \"certified-operators-skqjj\" (UID: \"c3526efa-a3f6-4ec1-8273-e302279281ba\") " pod="openshift-marketplace/certified-operators-skqjj"
Dec 13 07:01:44 crc kubenswrapper[4644]: I1213 07:01:44.344922 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3526efa-a3f6-4ec1-8273-e302279281ba-utilities\") pod \"certified-operators-skqjj\" (UID: \"c3526efa-a3f6-4ec1-8273-e302279281ba\") " pod="openshift-marketplace/certified-operators-skqjj"
Dec 13 07:01:44 crc kubenswrapper[4644]: I1213 07:01:44.345032 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3526efa-a3f6-4ec1-8273-e302279281ba-catalog-content\") pod \"certified-operators-skqjj\" (UID: \"c3526efa-a3f6-4ec1-8273-e302279281ba\") " pod="openshift-marketplace/certified-operators-skqjj"
Dec 13 07:01:44 crc kubenswrapper[4644]: I1213 07:01:44.364840 4644 operation_generator.go:637]
"MountVolume.SetUp succeeded for volume \"kube-api-access-z66nv\" (UniqueName: \"kubernetes.io/projected/c3526efa-a3f6-4ec1-8273-e302279281ba-kube-api-access-z66nv\") pod \"certified-operators-skqjj\" (UID: \"c3526efa-a3f6-4ec1-8273-e302279281ba\") " pod="openshift-marketplace/certified-operators-skqjj" Dec 13 07:01:44 crc kubenswrapper[4644]: I1213 07:01:44.443332 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-skqjj" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.123871 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7fb68bc955-v6xbp"] Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.162658 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7dc5bbd594-tswgs"] Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.164040 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.168693 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.195578 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7dc5bbd594-tswgs"] Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.234945 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-587d9954b7-hbjrt"] Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.261121 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-668dddc65b-wlzwz"] Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.262054 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-horizon-secret-key\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.262162 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa9ae998-069d-4264-a6d3-2a8f51373524-scripts\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.262204 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hnqp\" (UniqueName: \"kubernetes.io/projected/fa9ae998-069d-4264-a6d3-2a8f51373524-kube-api-access-8hnqp\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.262248 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa9ae998-069d-4264-a6d3-2a8f51373524-logs\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.262312 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.262320 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fa9ae998-069d-4264-a6d3-2a8f51373524-config-data\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.262370 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-combined-ca-bundle\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.262401 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-horizon-tls-certs\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.275661 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-668dddc65b-wlzwz"] Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.364172 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7d37a2f-8117-4d49-8e28-f06339a276cf-combined-ca-bundle\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.364232 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fa9ae998-069d-4264-a6d3-2a8f51373524-config-data\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.364316 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b7d37a2f-8117-4d49-8e28-f06339a276cf-scripts\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.364333 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-combined-ca-bundle\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.364538 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7q4l\" (UniqueName: \"kubernetes.io/projected/b7d37a2f-8117-4d49-8e28-f06339a276cf-kube-api-access-g7q4l\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.364569 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-horizon-tls-certs\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.364611 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-horizon-secret-key\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.364646 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b7d37a2f-8117-4d49-8e28-f06339a276cf-config-data\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.364705 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa9ae998-069d-4264-a6d3-2a8f51373524-scripts\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.364749 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7d37a2f-8117-4d49-8e28-f06339a276cf-logs\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.364772 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hnqp\" (UniqueName: \"kubernetes.io/projected/fa9ae998-069d-4264-a6d3-2a8f51373524-kube-api-access-8hnqp\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.364817 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa9ae998-069d-4264-a6d3-2a8f51373524-logs\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.364840 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b7d37a2f-8117-4d49-8e28-f06339a276cf-horizon-secret-key\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.364868 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7d37a2f-8117-4d49-8e28-f06339a276cf-horizon-tls-certs\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.365388 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/fa9ae998-069d-4264-a6d3-2a8f51373524-logs\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.365632 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa9ae998-069d-4264-a6d3-2a8f51373524-scripts\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.367082 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fa9ae998-069d-4264-a6d3-2a8f51373524-config-data\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.372063 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-horizon-secret-key\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.372121 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-horizon-tls-certs\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.372788 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-combined-ca-bundle\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.382342 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hnqp\" (UniqueName: \"kubernetes.io/projected/fa9ae998-069d-4264-a6d3-2a8f51373524-kube-api-access-8hnqp\") pod \"horizon-7dc5bbd594-tswgs\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.466388 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b7d37a2f-8117-4d49-8e28-f06339a276cf-scripts\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.466455 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7q4l\" (UniqueName: \"kubernetes.io/projected/b7d37a2f-8117-4d49-8e28-f06339a276cf-kube-api-access-g7q4l\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.466531 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b7d37a2f-8117-4d49-8e28-f06339a276cf-config-data\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " 
pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.466626 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7d37a2f-8117-4d49-8e28-f06339a276cf-logs\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.466692 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b7d37a2f-8117-4d49-8e28-f06339a276cf-horizon-secret-key\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.466719 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7d37a2f-8117-4d49-8e28-f06339a276cf-horizon-tls-certs\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.466785 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7d37a2f-8117-4d49-8e28-f06339a276cf-combined-ca-bundle\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.468494 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b7d37a2f-8117-4d49-8e28-f06339a276cf-scripts\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.468584 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7d37a2f-8117-4d49-8e28-f06339a276cf-logs\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.469472 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b7d37a2f-8117-4d49-8e28-f06339a276cf-config-data\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.470189 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7d37a2f-8117-4d49-8e28-f06339a276cf-combined-ca-bundle\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.472815 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b7d37a2f-8117-4d49-8e28-f06339a276cf-horizon-tls-certs\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.480497 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" 
(UniqueName: \"kubernetes.io/secret/b7d37a2f-8117-4d49-8e28-f06339a276cf-horizon-secret-key\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.482112 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.485128 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7q4l\" (UniqueName: \"kubernetes.io/projected/b7d37a2f-8117-4d49-8e28-f06339a276cf-kube-api-access-g7q4l\") pod \"horizon-668dddc65b-wlzwz\" (UID: \"b7d37a2f-8117-4d49-8e28-f06339a276cf\") " pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:45 crc kubenswrapper[4644]: I1213 07:01:45.577072 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.087841 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.178660 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-combined-ca-bundle\") pod \"dad101f4-3870-4a48-88da-892e436608cf\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.178744 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-config-data\") pod \"dad101f4-3870-4a48-88da-892e436608cf\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.178764 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-scripts\") pod \"dad101f4-3870-4a48-88da-892e436608cf\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.178814 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-credential-keys\") pod \"dad101f4-3870-4a48-88da-892e436608cf\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.178897 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spxjr\" (UniqueName: \"kubernetes.io/projected/dad101f4-3870-4a48-88da-892e436608cf-kube-api-access-spxjr\") pod \"dad101f4-3870-4a48-88da-892e436608cf\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.178950 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-fernet-keys\") pod \"dad101f4-3870-4a48-88da-892e436608cf\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.182228 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "dad101f4-3870-4a48-88da-892e436608cf" 
(UID: "dad101f4-3870-4a48-88da-892e436608cf"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.183231 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "dad101f4-3870-4a48-88da-892e436608cf" (UID: "dad101f4-3870-4a48-88da-892e436608cf"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.183596 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-scripts" (OuterVolumeSpecName: "scripts") pod "dad101f4-3870-4a48-88da-892e436608cf" (UID: "dad101f4-3870-4a48-88da-892e436608cf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.184007 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dad101f4-3870-4a48-88da-892e436608cf-kube-api-access-spxjr" (OuterVolumeSpecName: "kube-api-access-spxjr") pod "dad101f4-3870-4a48-88da-892e436608cf" (UID: "dad101f4-3870-4a48-88da-892e436608cf"). InnerVolumeSpecName "kube-api-access-spxjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:01:46 crc kubenswrapper[4644]: E1213 07:01:46.198194 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-combined-ca-bundle podName:dad101f4-3870-4a48-88da-892e436608cf nodeName:}" failed. No retries permitted until 2025-12-13 07:01:46.698171533 +0000 UTC m=+968.913122365 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-combined-ca-bundle") pod "dad101f4-3870-4a48-88da-892e436608cf" (UID: "dad101f4-3870-4a48-88da-892e436608cf") : error deleting /var/lib/kubelet/pods/dad101f4-3870-4a48-88da-892e436608cf/volume-subpaths: remove /var/lib/kubelet/pods/dad101f4-3870-4a48-88da-892e436608cf/volume-subpaths: no such file or directory Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.201941 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-config-data" (OuterVolumeSpecName: "config-data") pod "dad101f4-3870-4a48-88da-892e436608cf" (UID: "dad101f4-3870-4a48-88da-892e436608cf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.280851 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spxjr\" (UniqueName: \"kubernetes.io/projected/dad101f4-3870-4a48-88da-892e436608cf-kube-api-access-spxjr\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.280883 4644 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.280892 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.280899 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.280908 4644 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.443970 4644 generic.go:334] "Generic (PLEG): container finished" podID="ae5ec083-82fc-4a1b-826f-50536ee5fcd0" containerID="83ad15e74ba901753f87023a8a502c26d06b4f90b1a970597075c9488660170e" exitCode=0 Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.444035 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-542cr" event={"ID":"ae5ec083-82fc-4a1b-826f-50536ee5fcd0","Type":"ContainerDied","Data":"83ad15e74ba901753f87023a8a502c26d06b4f90b1a970597075c9488660170e"} Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.445268 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-j5z52" event={"ID":"dad101f4-3870-4a48-88da-892e436608cf","Type":"ContainerDied","Data":"5b8efd0509cfadb63a711378c3c10fba48980521681bc1460d8a731b1d96862b"} Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.445349 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b8efd0509cfadb63a711378c3c10fba48980521681bc1460d8a731b1d96862b" Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.445424 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-j5z52" Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.791522 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-combined-ca-bundle\") pod \"dad101f4-3870-4a48-88da-892e436608cf\" (UID: \"dad101f4-3870-4a48-88da-892e436608cf\") " Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.795790 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dad101f4-3870-4a48-88da-892e436608cf" (UID: "dad101f4-3870-4a48-88da-892e436608cf"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:01:46 crc kubenswrapper[4644]: I1213 07:01:46.894098 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dad101f4-3870-4a48-88da-892e436608cf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.150206 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-j5z52"] Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.158177 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-j5z52"] Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.258660 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-gp49b"] Dec 13 07:01:47 crc kubenswrapper[4644]: E1213 07:01:47.259060 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dad101f4-3870-4a48-88da-892e436608cf" containerName="keystone-bootstrap" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.259077 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="dad101f4-3870-4a48-88da-892e436608cf" containerName="keystone-bootstrap" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.259247 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="dad101f4-3870-4a48-88da-892e436608cf" containerName="keystone-bootstrap" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.259854 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.261834 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.264404 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.264427 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.265012 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-5hlkk" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.267541 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-gp49b"] Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.270163 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.402971 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-config-data\") pod \"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.403167 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-combined-ca-bundle\") pod \"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.403243 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fs6n4\" 
(UniqueName: \"kubernetes.io/projected/02c6c85f-c08e-4317-b3fc-35689c19bade-kube-api-access-fs6n4\") pod \"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.403352 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-fernet-keys\") pod \"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.403584 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-scripts\") pod \"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.403623 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-credential-keys\") pod \"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.504950 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-combined-ca-bundle\") pod \"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.505004 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fs6n4\" (UniqueName: \"kubernetes.io/projected/02c6c85f-c08e-4317-b3fc-35689c19bade-kube-api-access-fs6n4\") pod \"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.505067 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-fernet-keys\") pod \"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.505188 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-scripts\") pod \"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.505207 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-credential-keys\") pod \"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.505298 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-config-data\") pod 
\"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.507206 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.509594 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-config-data\") pod \"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.513101 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-scripts\") pod \"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.515716 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-combined-ca-bundle\") pod \"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.518098 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-credential-keys\") pod \"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.518519 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-fernet-keys\") pod \"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.518806 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fs6n4\" (UniqueName: \"kubernetes.io/projected/02c6c85f-c08e-4317-b3fc-35689c19bade-kube-api-access-fs6n4\") pod \"keystone-bootstrap-gp49b\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.555311 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bc56d6f79-jjz79"] Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.555544 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" podUID="bc9ebb37-f21c-4029-a614-ef0203c99d34" containerName="dnsmasq-dns" containerID="cri-o://33424c46aa834c4c129fec50f45a9b4229c4fa5175b84b4a36f663a884dcb7a5" gracePeriod=10 Dec 13 07:01:47 crc kubenswrapper[4644]: I1213 07:01:47.590075 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:01:48 crc kubenswrapper[4644]: I1213 07:01:48.406862 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dad101f4-3870-4a48-88da-892e436608cf" path="/var/lib/kubelet/pods/dad101f4-3870-4a48-88da-892e436608cf/volumes" Dec 13 07:01:48 crc kubenswrapper[4644]: I1213 07:01:48.459401 4644 generic.go:334] "Generic (PLEG): container finished" podID="bc9ebb37-f21c-4029-a614-ef0203c99d34" containerID="33424c46aa834c4c129fec50f45a9b4229c4fa5175b84b4a36f663a884dcb7a5" exitCode=0 Dec 13 07:01:48 crc kubenswrapper[4644]: I1213 07:01:48.459459 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" event={"ID":"bc9ebb37-f21c-4029-a614-ef0203c99d34","Type":"ContainerDied","Data":"33424c46aa834c4c129fec50f45a9b4229c4fa5175b84b4a36f663a884dcb7a5"} Dec 13 07:01:49 crc kubenswrapper[4644]: I1213 07:01:49.712018 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" podUID="bc9ebb37-f21c-4029-a614-ef0203c99d34" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.119:5353: connect: connection refused" Dec 13 07:01:52 crc kubenswrapper[4644]: E1213 07:01:52.339280 4644 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7" Dec 13 07:01:52 crc kubenswrapper[4644]: E1213 07:01:52.339599 4644 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n657hdbh68bh86h54ch5fh594h5f8hf7h97h55ch67h646h67fh569h577h79h5c8hd9h698h586h68fh559h56ch59dh5f5h95h589h69h5f6h596h698q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bwzjw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-7fb68bc955-v6xbp_openstack(c0a6ff01-7d9f-4725-96e0-82d5750b9635): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 07:01:52 crc kubenswrapper[4644]: E1213 07:01:52.341389 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7\\\"\"]" pod="openstack/horizon-7fb68bc955-v6xbp" podUID="c0a6ff01-7d9f-4725-96e0-82d5750b9635" Dec 13 07:01:54 crc kubenswrapper[4644]: I1213 07:01:54.711807 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" podUID="bc9ebb37-f21c-4029-a614-ef0203c99d34" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.119:5353: connect: connection refused" Dec 13 07:01:58 crc kubenswrapper[4644]: E1213 07:01:58.528043 4644 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7" Dec 13 07:01:58 crc kubenswrapper[4644]: E1213 07:01:58.528499 4644 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nf5h5fbhb4h574h65fh5cbh599h547h66h5c6h6fh657h5c5h5b7h547h56dh555h65ch585hb8h57fh59dh67h55h5f4h8bh5f4h668h67h9ch76h8bq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7vmpx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-587d9954b7-hbjrt_openstack(4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 07:01:58 crc kubenswrapper[4644]: E1213 07:01:58.529317 4644 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7" Dec 13 07:01:58 crc kubenswrapper[4644]: E1213 07:01:58.529592 4644 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5b5h587h6h545h658h5ffh5f5h669h9hcbh6ch54chb6hcch5cbh85h579h68fh67fh655h9h5b8h5c6h655h5d6h6dh5c4h58dh67h97h694h596q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tgslq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-55f676769c-f5b8d_openstack(fa16de04-2c8d-4421-8e23-c6dd519bf22d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 07:01:58 crc kubenswrapper[4644]: E1213 07:01:58.539315 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7\\\"\"]" pod="openstack/horizon-587d9954b7-hbjrt" podUID="4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04" Dec 13 07:01:58 crc kubenswrapper[4644]: E1213 07:01:58.539574 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7\\\"\"]" pod="openstack/horizon-55f676769c-f5b8d" podUID="fa16de04-2c8d-4421-8e23-c6dd519bf22d" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:01:59.938496 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-542cr" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:01:59.946421 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.039615 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lk4qf\" (UniqueName: \"kubernetes.io/projected/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-kube-api-access-lk4qf\") pod \"ae5ec083-82fc-4a1b-826f-50536ee5fcd0\" (UID: \"ae5ec083-82fc-4a1b-826f-50536ee5fcd0\") " Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.039710 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwzjw\" (UniqueName: \"kubernetes.io/projected/c0a6ff01-7d9f-4725-96e0-82d5750b9635-kube-api-access-bwzjw\") pod \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.039806 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0a6ff01-7d9f-4725-96e0-82d5750b9635-horizon-secret-key\") pod \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.039837 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-config\") pod \"ae5ec083-82fc-4a1b-826f-50536ee5fcd0\" (UID: \"ae5ec083-82fc-4a1b-826f-50536ee5fcd0\") " Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.040580 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0a6ff01-7d9f-4725-96e0-82d5750b9635-config-data\") pod \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.040679 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0a6ff01-7d9f-4725-96e0-82d5750b9635-scripts\") pod \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.040723 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0a6ff01-7d9f-4725-96e0-82d5750b9635-logs\") pod \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\" (UID: \"c0a6ff01-7d9f-4725-96e0-82d5750b9635\") " Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.040797 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-combined-ca-bundle\") pod \"ae5ec083-82fc-4a1b-826f-50536ee5fcd0\" (UID: \"ae5ec083-82fc-4a1b-826f-50536ee5fcd0\") " Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.041508 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0a6ff01-7d9f-4725-96e0-82d5750b9635-config-data" (OuterVolumeSpecName: "config-data") pod "c0a6ff01-7d9f-4725-96e0-82d5750b9635" (UID: "c0a6ff01-7d9f-4725-96e0-82d5750b9635"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.042074 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0a6ff01-7d9f-4725-96e0-82d5750b9635-logs" (OuterVolumeSpecName: "logs") pod "c0a6ff01-7d9f-4725-96e0-82d5750b9635" (UID: "c0a6ff01-7d9f-4725-96e0-82d5750b9635"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.042189 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0a6ff01-7d9f-4725-96e0-82d5750b9635-scripts" (OuterVolumeSpecName: "scripts") pod "c0a6ff01-7d9f-4725-96e0-82d5750b9635" (UID: "c0a6ff01-7d9f-4725-96e0-82d5750b9635"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.044993 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0a6ff01-7d9f-4725-96e0-82d5750b9635-kube-api-access-bwzjw" (OuterVolumeSpecName: "kube-api-access-bwzjw") pod "c0a6ff01-7d9f-4725-96e0-82d5750b9635" (UID: "c0a6ff01-7d9f-4725-96e0-82d5750b9635"). InnerVolumeSpecName "kube-api-access-bwzjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.045507 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0a6ff01-7d9f-4725-96e0-82d5750b9635-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "c0a6ff01-7d9f-4725-96e0-82d5750b9635" (UID: "c0a6ff01-7d9f-4725-96e0-82d5750b9635"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.046240 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-kube-api-access-lk4qf" (OuterVolumeSpecName: "kube-api-access-lk4qf") pod "ae5ec083-82fc-4a1b-826f-50536ee5fcd0" (UID: "ae5ec083-82fc-4a1b-826f-50536ee5fcd0"). InnerVolumeSpecName "kube-api-access-lk4qf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.060600 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-config" (OuterVolumeSpecName: "config") pod "ae5ec083-82fc-4a1b-826f-50536ee5fcd0" (UID: "ae5ec083-82fc-4a1b-826f-50536ee5fcd0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.077723 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae5ec083-82fc-4a1b-826f-50536ee5fcd0" (UID: "ae5ec083-82fc-4a1b-826f-50536ee5fcd0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.144778 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwzjw\" (UniqueName: \"kubernetes.io/projected/c0a6ff01-7d9f-4725-96e0-82d5750b9635-kube-api-access-bwzjw\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.144803 4644 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c0a6ff01-7d9f-4725-96e0-82d5750b9635-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.144830 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.144839 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0a6ff01-7d9f-4725-96e0-82d5750b9635-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.144848 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0a6ff01-7d9f-4725-96e0-82d5750b9635-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.144856 4644 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0a6ff01-7d9f-4725-96e0-82d5750b9635-logs\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.144864 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.144872 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lk4qf\" (UniqueName: \"kubernetes.io/projected/ae5ec083-82fc-4a1b-826f-50536ee5fcd0-kube-api-access-lk4qf\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.544700 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7fb68bc955-v6xbp" event={"ID":"c0a6ff01-7d9f-4725-96e0-82d5750b9635","Type":"ContainerDied","Data":"17d66e29718284421bddf282b763edbd191da0c16dc990617659e899b478dac2"} Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.544743 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7fb68bc955-v6xbp" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.546483 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-542cr" event={"ID":"ae5ec083-82fc-4a1b-826f-50536ee5fcd0","Type":"ContainerDied","Data":"2a3d4a6e4e339ee29ef44dfff23f9a525540b0a5791149460c4197a7fc5edf67"} Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.546562 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2a3d4a6e4e339ee29ef44dfff23f9a525540b0a5791149460c4197a7fc5edf67" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.546508 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-542cr" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.586467 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7fb68bc955-v6xbp"] Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.591834 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7fb68bc955-v6xbp"] Dec 13 07:02:00 crc kubenswrapper[4644]: E1213 07:02:00.801360 4644 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49" Dec 13 07:02:00 crc kubenswrapper[4644]: E1213 07:02:00.801517 4644 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gg5pv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-fdgxf_openstack(bf873768-3def-4a7c-b48b-fb3749f8c927): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 07:02:00 crc kubenswrapper[4644]: E1213 07:02:00.803005 4644 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-fdgxf" podUID="bf873768-3def-4a7c-b48b-fb3749f8c927" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.907946 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.914159 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-587d9954b7-hbjrt" Dec 13 07:02:00 crc kubenswrapper[4644]: I1213 07:02:00.920557 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.086709 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f69459849-4h2fh"] Dec 13 07:02:01 crc kubenswrapper[4644]: E1213 07:02:01.087089 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae5ec083-82fc-4a1b-826f-50536ee5fcd0" containerName="neutron-db-sync" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.087110 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae5ec083-82fc-4a1b-826f-50536ee5fcd0" containerName="neutron-db-sync" Dec 13 07:02:01 crc kubenswrapper[4644]: E1213 07:02:01.087118 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc9ebb37-f21c-4029-a614-ef0203c99d34" containerName="dnsmasq-dns" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.087124 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc9ebb37-f21c-4029-a614-ef0203c99d34" containerName="dnsmasq-dns" Dec 13 07:02:01 crc kubenswrapper[4644]: E1213 07:02:01.087137 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc9ebb37-f21c-4029-a614-ef0203c99d34" containerName="init" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.087142 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc9ebb37-f21c-4029-a614-ef0203c99d34" containerName="init" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.087332 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae5ec083-82fc-4a1b-826f-50536ee5fcd0" containerName="neutron-db-sync" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.087348 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc9ebb37-f21c-4029-a614-ef0203c99d34" containerName="dnsmasq-dns" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.088180 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.105127 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-ovsdbserver-nb\") pod \"bc9ebb37-f21c-4029-a614-ef0203c99d34\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.105200 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fa16de04-2c8d-4421-8e23-c6dd519bf22d-config-data\") pod \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.105230 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa16de04-2c8d-4421-8e23-c6dd519bf22d-logs\") pod \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.105256 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-horizon-secret-key\") pod \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.105285 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgslq\" (UniqueName: \"kubernetes.io/projected/fa16de04-2c8d-4421-8e23-c6dd519bf22d-kube-api-access-tgslq\") pod \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.105316 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vmpx\" (UniqueName: \"kubernetes.io/projected/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-kube-api-access-7vmpx\") pod \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.105337 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-scripts\") pod \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.105362 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jnvp\" (UniqueName: \"kubernetes.io/projected/bc9ebb37-f21c-4029-a614-ef0203c99d34-kube-api-access-6jnvp\") pod \"bc9ebb37-f21c-4029-a614-ef0203c99d34\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.105376 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa16de04-2c8d-4421-8e23-c6dd519bf22d-scripts\") pod \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.105496 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-config\") pod \"bc9ebb37-f21c-4029-a614-ef0203c99d34\" (UID: 
\"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.105514 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-dns-svc\") pod \"bc9ebb37-f21c-4029-a614-ef0203c99d34\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.105545 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-ovsdbserver-sb\") pod \"bc9ebb37-f21c-4029-a614-ef0203c99d34\" (UID: \"bc9ebb37-f21c-4029-a614-ef0203c99d34\") " Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.105570 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fa16de04-2c8d-4421-8e23-c6dd519bf22d-horizon-secret-key\") pod \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\" (UID: \"fa16de04-2c8d-4421-8e23-c6dd519bf22d\") " Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.105592 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-config-data\") pod \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.105611 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-logs\") pod \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\" (UID: \"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04\") " Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.105694 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa16de04-2c8d-4421-8e23-c6dd519bf22d-logs" (OuterVolumeSpecName: "logs") pod "fa16de04-2c8d-4421-8e23-c6dd519bf22d" (UID: "fa16de04-2c8d-4421-8e23-c6dd519bf22d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.106218 4644 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa16de04-2c8d-4421-8e23-c6dd519bf22d-logs\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.106518 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-logs" (OuterVolumeSpecName: "logs") pod "4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04" (UID: "4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.110247 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f69459849-4h2fh"] Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.110813 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa16de04-2c8d-4421-8e23-c6dd519bf22d-config-data" (OuterVolumeSpecName: "config-data") pod "fa16de04-2c8d-4421-8e23-c6dd519bf22d" (UID: "fa16de04-2c8d-4421-8e23-c6dd519bf22d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.111223 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa16de04-2c8d-4421-8e23-c6dd519bf22d-scripts" (OuterVolumeSpecName: "scripts") pod "fa16de04-2c8d-4421-8e23-c6dd519bf22d" (UID: "fa16de04-2c8d-4421-8e23-c6dd519bf22d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.113097 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-scripts" (OuterVolumeSpecName: "scripts") pod "4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04" (UID: "4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.114264 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-config-data" (OuterVolumeSpecName: "config-data") pod "4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04" (UID: "4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.119930 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc9ebb37-f21c-4029-a614-ef0203c99d34-kube-api-access-6jnvp" (OuterVolumeSpecName: "kube-api-access-6jnvp") pod "bc9ebb37-f21c-4029-a614-ef0203c99d34" (UID: "bc9ebb37-f21c-4029-a614-ef0203c99d34"). InnerVolumeSpecName "kube-api-access-6jnvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.120178 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa16de04-2c8d-4421-8e23-c6dd519bf22d-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "fa16de04-2c8d-4421-8e23-c6dd519bf22d" (UID: "fa16de04-2c8d-4421-8e23-c6dd519bf22d"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.120568 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-kube-api-access-7vmpx" (OuterVolumeSpecName: "kube-api-access-7vmpx") pod "4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04" (UID: "4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04"). InnerVolumeSpecName "kube-api-access-7vmpx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.120923 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa16de04-2c8d-4421-8e23-c6dd519bf22d-kube-api-access-tgslq" (OuterVolumeSpecName: "kube-api-access-tgslq") pod "fa16de04-2c8d-4421-8e23-c6dd519bf22d" (UID: "fa16de04-2c8d-4421-8e23-c6dd519bf22d"). InnerVolumeSpecName "kube-api-access-tgslq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.131506 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04" (UID: "4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04"). 
InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.166130 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bc9ebb37-f21c-4029-a614-ef0203c99d34" (UID: "bc9ebb37-f21c-4029-a614-ef0203c99d34"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.167431 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bc9ebb37-f21c-4029-a614-ef0203c99d34" (UID: "bc9ebb37-f21c-4029-a614-ef0203c99d34"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.174539 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bc9ebb37-f21c-4029-a614-ef0203c99d34" (UID: "bc9ebb37-f21c-4029-a614-ef0203c99d34"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.176214 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-config" (OuterVolumeSpecName: "config") pod "bc9ebb37-f21c-4029-a614-ef0203c99d34" (UID: "bc9ebb37-f21c-4029-a614-ef0203c99d34"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.208898 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-dns-svc\") pod \"dnsmasq-dns-7f69459849-4h2fh\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.208974 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ttrh\" (UniqueName: \"kubernetes.io/projected/e0607341-6c2d-41db-8350-17f32d48aedc-kube-api-access-5ttrh\") pod \"dnsmasq-dns-7f69459849-4h2fh\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.209055 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-ovsdbserver-nb\") pod \"dnsmasq-dns-7f69459849-4h2fh\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.209135 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-config\") pod \"dnsmasq-dns-7f69459849-4h2fh\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.209206 4644 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-ovsdbserver-sb\") pod \"dnsmasq-dns-7f69459849-4h2fh\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.209270 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.209283 4644 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.209291 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.209299 4644 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fa16de04-2c8d-4421-8e23-c6dd519bf22d-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.209308 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.209317 4644 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-logs\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.209327 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc9ebb37-f21c-4029-a614-ef0203c99d34-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.209336 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fa16de04-2c8d-4421-8e23-c6dd519bf22d-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.209343 4644 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.209352 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgslq\" (UniqueName: \"kubernetes.io/projected/fa16de04-2c8d-4421-8e23-c6dd519bf22d-kube-api-access-tgslq\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.209360 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vmpx\" (UniqueName: \"kubernetes.io/projected/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-kube-api-access-7vmpx\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.209368 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.209376 
4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jnvp\" (UniqueName: \"kubernetes.io/projected/bc9ebb37-f21c-4029-a614-ef0203c99d34-kube-api-access-6jnvp\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.209385 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa16de04-2c8d-4421-8e23-c6dd519bf22d-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.275507 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-85586f897b-4b47l"] Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.276901 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.280781 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.281017 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-qj6rp" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.282415 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.286609 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.291491 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-668dddc65b-wlzwz"] Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.296384 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-85586f897b-4b47l"] Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.310767 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-ovsdbserver-sb\") pod \"dnsmasq-dns-7f69459849-4h2fh\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.310914 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-dns-svc\") pod \"dnsmasq-dns-7f69459849-4h2fh\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.310969 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ttrh\" (UniqueName: \"kubernetes.io/projected/e0607341-6c2d-41db-8350-17f32d48aedc-kube-api-access-5ttrh\") pod \"dnsmasq-dns-7f69459849-4h2fh\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.311063 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-ovsdbserver-nb\") pod \"dnsmasq-dns-7f69459849-4h2fh\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.311110 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-config\") pod \"dnsmasq-dns-7f69459849-4h2fh\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.321164 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-config\") pod \"dnsmasq-dns-7f69459849-4h2fh\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.323334 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-dns-svc\") pod \"dnsmasq-dns-7f69459849-4h2fh\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.323726 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-ovsdbserver-nb\") pod \"dnsmasq-dns-7f69459849-4h2fh\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.332674 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-ovsdbserver-sb\") pod \"dnsmasq-dns-7f69459849-4h2fh\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.345031 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ttrh\" (UniqueName: \"kubernetes.io/projected/e0607341-6c2d-41db-8350-17f32d48aedc-kube-api-access-5ttrh\") pod \"dnsmasq-dns-7f69459849-4h2fh\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.406037 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.412602 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-config\") pod \"neutron-85586f897b-4b47l\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.412663 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-ovndb-tls-certs\") pod \"neutron-85586f897b-4b47l\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.412744 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-combined-ca-bundle\") pod \"neutron-85586f897b-4b47l\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.412791 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-np97m\" (UniqueName: \"kubernetes.io/projected/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-kube-api-access-np97m\") pod \"neutron-85586f897b-4b47l\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.412878 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-httpd-config\") pod \"neutron-85586f897b-4b47l\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.514197 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-combined-ca-bundle\") pod \"neutron-85586f897b-4b47l\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.514274 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-np97m\" (UniqueName: \"kubernetes.io/projected/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-kube-api-access-np97m\") pod \"neutron-85586f897b-4b47l\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.514457 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-httpd-config\") pod \"neutron-85586f897b-4b47l\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.514499 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-config\") pod \"neutron-85586f897b-4b47l\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " 
pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.514545 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-ovndb-tls-certs\") pod \"neutron-85586f897b-4b47l\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.518554 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-combined-ca-bundle\") pod \"neutron-85586f897b-4b47l\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.518630 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-httpd-config\") pod \"neutron-85586f897b-4b47l\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.519366 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-config\") pod \"neutron-85586f897b-4b47l\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.519822 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-ovndb-tls-certs\") pod \"neutron-85586f897b-4b47l\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.530689 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-np97m\" (UniqueName: \"kubernetes.io/projected/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-kube-api-access-np97m\") pod \"neutron-85586f897b-4b47l\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.557748 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55f676769c-f5b8d" event={"ID":"fa16de04-2c8d-4421-8e23-c6dd519bf22d","Type":"ContainerDied","Data":"e6a6882d51222f49c9ef30ccc036cdf5c16a146d112c00c80ac37fa70818cff9"} Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.557811 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-55f676769c-f5b8d" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.561867 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" event={"ID":"bc9ebb37-f21c-4029-a614-ef0203c99d34","Type":"ContainerDied","Data":"acc0b16d7dca3dd767d820e3ea687712007990221a099fd0ffdaaf0134fb81a4"} Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.561918 4644 scope.go:117] "RemoveContainer" containerID="33424c46aa834c4c129fec50f45a9b4229c4fa5175b84b4a36f663a884dcb7a5" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.562053 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.568169 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-587d9954b7-hbjrt" event={"ID":"4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04","Type":"ContainerDied","Data":"52c5f84fa3f80beafe2b9406ea7fa0586a9eb037d20726552ec86600a4364510"} Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.568413 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-587d9954b7-hbjrt" Dec 13 07:02:01 crc kubenswrapper[4644]: E1213 07:02:01.571240 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49\\\"\"" pod="openstack/cinder-db-sync-fdgxf" podUID="bf873768-3def-4a7c-b48b-fb3749f8c927" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.638793 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-587d9954b7-hbjrt"] Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.650921 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-587d9954b7-hbjrt"] Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.651429 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.678360 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-55f676769c-f5b8d"] Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.685674 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-55f676769c-f5b8d"] Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.692059 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bc56d6f79-jjz79"] Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.697924 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bc56d6f79-jjz79"] Dec 13 07:02:01 crc kubenswrapper[4644]: E1213 07:02:01.851218 4644 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:fe32d3ea620f0c7ecfdde9bbf28417fde03bc18c6f60b1408fa8da24d8188f16" Dec 13 07:02:01 crc kubenswrapper[4644]: E1213 07:02:01.851613 4644 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:fe32d3ea620f0c7ecfdde9bbf28417fde03bc18c6f60b1408fa8da24d8188f16,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xcl79,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-dwfrr_openstack(2f264a4d-92e3-4a69-a07f-00a3f0802484): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 13 07:02:01 crc kubenswrapper[4644]: E1213 07:02:01.853072 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-dwfrr" podUID="2f264a4d-92e3-4a69-a07f-00a3f0802484" Dec 13 07:02:01 crc kubenswrapper[4644]: W1213 07:02:01.857183 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb7d37a2f_8117_4d49_8e28_f06339a276cf.slice/crio-59b9ee31e19372047e593ceaa0fb9d19d5e6010ffdc80da1ccb5184aeff9538b WatchSource:0}: Error finding container 59b9ee31e19372047e593ceaa0fb9d19d5e6010ffdc80da1ccb5184aeff9538b: Status 404 returned error can't find the container with id 59b9ee31e19372047e593ceaa0fb9d19d5e6010ffdc80da1ccb5184aeff9538b Dec 13 07:02:01 crc kubenswrapper[4644]: I1213 07:02:01.865082 4644 scope.go:117] "RemoveContainer" containerID="f6b1abd71872c5f3d463762c134739ad22c075e1e33797e37bb1d3806a0c9232" Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.299936 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-gp49b"] Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.311632 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-skqjj"] Dec 13 07:02:02 crc kubenswrapper[4644]: W1213 07:02:02.312389 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02c6c85f_c08e_4317_b3fc_35689c19bade.slice/crio-ab4b7a35110a61f32ba020a84d7e1d7624eb02a55e9ad1e39845d215bbbcb828 WatchSource:0}: Error finding container ab4b7a35110a61f32ba020a84d7e1d7624eb02a55e9ad1e39845d215bbbcb828: Status 404 returned error can't find the container with id 
ab4b7a35110a61f32ba020a84d7e1d7624eb02a55e9ad1e39845d215bbbcb828 Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.404676 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04" path="/var/lib/kubelet/pods/4a2256f8-ee9b-466b-b9c0-ab7a30cb4b04/volumes" Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.405365 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc9ebb37-f21c-4029-a614-ef0203c99d34" path="/var/lib/kubelet/pods/bc9ebb37-f21c-4029-a614-ef0203c99d34/volumes" Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.406804 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0a6ff01-7d9f-4725-96e0-82d5750b9635" path="/var/lib/kubelet/pods/c0a6ff01-7d9f-4725-96e0-82d5750b9635/volumes" Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.407247 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa16de04-2c8d-4421-8e23-c6dd519bf22d" path="/var/lib/kubelet/pods/fa16de04-2c8d-4421-8e23-c6dd519bf22d/volumes" Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.470098 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f69459849-4h2fh"] Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.480285 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7dc5bbd594-tswgs"] Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.541866 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-85586f897b-4b47l"] Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.584365 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-85586f897b-4b47l" event={"ID":"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a","Type":"ContainerStarted","Data":"24467e9613178e2adf913a0a968ca78150ae8a7a92e4df4c30593de6baef2939"} Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.586555 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gp49b" event={"ID":"02c6c85f-c08e-4317-b3fc-35689c19bade","Type":"ContainerStarted","Data":"eb29f4b138a392e8437ec9668608bb64c52366e4b34b2e5b07bbacc1e0f9ca25"} Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.586590 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gp49b" event={"ID":"02c6c85f-c08e-4317-b3fc-35689c19bade","Type":"ContainerStarted","Data":"ab4b7a35110a61f32ba020a84d7e1d7624eb02a55e9ad1e39845d215bbbcb828"} Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.587810 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7dc5bbd594-tswgs" event={"ID":"fa9ae998-069d-4264-a6d3-2a8f51373524","Type":"ContainerStarted","Data":"7981da8d80e72de66d4fd8b6061cce48d179cee700285041efd642663be2fb1a"} Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.589705 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bk2fx" event={"ID":"2d3803d2-f0df-48c8-ba36-8a1cffdc262e","Type":"ContainerStarted","Data":"16bffb83fecd6ae0a5ff90730e795f5ec4470569589b7510f35693b5e0f33b0b"} Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.591623 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f69459849-4h2fh" event={"ID":"e0607341-6c2d-41db-8350-17f32d48aedc","Type":"ContainerStarted","Data":"85407098b936d4f04f86c3e42cb75f7e81fe796dafbdeff2a710d76a5bd7d0b6"} Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.593366 4644 generic.go:334] "Generic (PLEG): container finished" 
podID="c3526efa-a3f6-4ec1-8273-e302279281ba" containerID="ada423bf6ef88f9b75a74f07c40ed8cc8886ecc8051d0b1bb8fd9a931ef49917" exitCode=0 Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.593472 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-skqjj" event={"ID":"c3526efa-a3f6-4ec1-8273-e302279281ba","Type":"ContainerDied","Data":"ada423bf6ef88f9b75a74f07c40ed8cc8886ecc8051d0b1bb8fd9a931ef49917"} Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.593505 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-skqjj" event={"ID":"c3526efa-a3f6-4ec1-8273-e302279281ba","Type":"ContainerStarted","Data":"1d09fd80e3140270ac01b34449c6f39a0b46bed0db4ec3a0aa6d34f17ed37697"} Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.595058 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-668dddc65b-wlzwz" event={"ID":"b7d37a2f-8117-4d49-8e28-f06339a276cf","Type":"ContainerStarted","Data":"59b9ee31e19372047e593ceaa0fb9d19d5e6010ffdc80da1ccb5184aeff9538b"} Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.597634 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c601fe2-d854-42ef-885a-e2acc45f1607","Type":"ContainerStarted","Data":"4c06db95a2033fa4cd71eb0c60b0ba4f56d460b9fb5c4a9bf9864f402e8bff41"} Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.605711 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-gp49b" podStartSLOduration=15.605694219 podStartE2EDuration="15.605694219s" podCreationTimestamp="2025-12-13 07:01:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:02:02.600963883 +0000 UTC m=+984.815914715" watchObservedRunningTime="2025-12-13 07:02:02.605694219 +0000 UTC m=+984.820645051" Dec 13 07:02:02 crc kubenswrapper[4644]: E1213 07:02:02.610279 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:fe32d3ea620f0c7ecfdde9bbf28417fde03bc18c6f60b1408fa8da24d8188f16\\\"\"" pod="openstack/barbican-db-sync-dwfrr" podUID="2f264a4d-92e3-4a69-a07f-00a3f0802484" Dec 13 07:02:02 crc kubenswrapper[4644]: I1213 07:02:02.643922 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-bk2fx" podStartSLOduration=4.117961339 podStartE2EDuration="26.643903818s" podCreationTimestamp="2025-12-13 07:01:36 +0000 UTC" firstStartedPulling="2025-12-13 07:01:38.267931367 +0000 UTC m=+960.482882200" lastFinishedPulling="2025-12-13 07:02:00.793873846 +0000 UTC m=+983.008824679" observedRunningTime="2025-12-13 07:02:02.635771787 +0000 UTC m=+984.850722620" watchObservedRunningTime="2025-12-13 07:02:02.643903818 +0000 UTC m=+984.858854651" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.368372 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-589fc5f8d9-v292l"] Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.370591 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.377384 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.377828 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.383265 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-589fc5f8d9-v292l"] Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.453798 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbsvg\" (UniqueName: \"kubernetes.io/projected/b130b8ec-1bd6-4b3f-975e-82f6d903da76-kube-api-access-cbsvg\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.453867 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-ovndb-tls-certs\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.453973 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-config\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.454022 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-internal-tls-certs\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.454085 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-combined-ca-bundle\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.454102 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-public-tls-certs\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.454132 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-httpd-config\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.555920 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-combined-ca-bundle\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.555963 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-public-tls-certs\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.556013 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-httpd-config\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.556249 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbsvg\" (UniqueName: \"kubernetes.io/projected/b130b8ec-1bd6-4b3f-975e-82f6d903da76-kube-api-access-cbsvg\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.556284 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-ovndb-tls-certs\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.556368 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-config\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.556395 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-internal-tls-certs\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.561050 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-ovndb-tls-certs\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.562039 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-combined-ca-bundle\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.564710 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-httpd-config\") pod \"neutron-589fc5f8d9-v292l\" (UID: 
\"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.566238 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-public-tls-certs\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.570175 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-internal-tls-certs\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.571140 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b130b8ec-1bd6-4b3f-975e-82f6d903da76-config\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.571945 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbsvg\" (UniqueName: \"kubernetes.io/projected/b130b8ec-1bd6-4b3f-975e-82f6d903da76-kube-api-access-cbsvg\") pod \"neutron-589fc5f8d9-v292l\" (UID: \"b130b8ec-1bd6-4b3f-975e-82f6d903da76\") " pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.616097 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-668dddc65b-wlzwz" event={"ID":"b7d37a2f-8117-4d49-8e28-f06339a276cf","Type":"ContainerStarted","Data":"b3940b97a80c84f8272bb8789648adfa5593257b2def6644d0b1de8b3127bbdf"} Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.617528 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-85586f897b-4b47l" event={"ID":"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a","Type":"ContainerStarted","Data":"26ca95e03e55f22c0404cf6fee0e21bee0f31128ea90589eada6a053eed352e9"} Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.618839 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7dc5bbd594-tswgs" event={"ID":"fa9ae998-069d-4264-a6d3-2a8f51373524","Type":"ContainerStarted","Data":"6d04b355eab221d23c1b8cfaf6910e1003738fd4205b9839ef41f687e14d4e45"} Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.620077 4644 generic.go:334] "Generic (PLEG): container finished" podID="e0607341-6c2d-41db-8350-17f32d48aedc" containerID="a0a1c7c45bdea2e04b596f1d3aae381e9131a7d28cfdaab4a2ec0de4f907d002" exitCode=0 Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.620168 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f69459849-4h2fh" event={"ID":"e0607341-6c2d-41db-8350-17f32d48aedc","Type":"ContainerDied","Data":"a0a1c7c45bdea2e04b596f1d3aae381e9131a7d28cfdaab4a2ec0de4f907d002"} Dec 13 07:02:03 crc kubenswrapper[4644]: I1213 07:02:03.701053 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.199369 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-589fc5f8d9-v292l"] Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.628486 4644 generic.go:334] "Generic (PLEG): container finished" podID="c3526efa-a3f6-4ec1-8273-e302279281ba" containerID="b514ffc45e3312c2d78e3842ac1360d5addb687cd4bb46397ccc6524c8cef6a9" exitCode=0 Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.628673 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-skqjj" event={"ID":"c3526efa-a3f6-4ec1-8273-e302279281ba","Type":"ContainerDied","Data":"b514ffc45e3312c2d78e3842ac1360d5addb687cd4bb46397ccc6524c8cef6a9"} Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.630591 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-668dddc65b-wlzwz" event={"ID":"b7d37a2f-8117-4d49-8e28-f06339a276cf","Type":"ContainerStarted","Data":"85a69c183521ad7f5532297ee03de837fb673268fd3a5367fa2c60f8eb914d7b"} Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.631940 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-85586f897b-4b47l" event={"ID":"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a","Type":"ContainerStarted","Data":"05f6ca39a0251b40d12afc00d55c4ec5e4fc967000cd617e5ba52ce8830e779d"} Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.633335 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7dc5bbd594-tswgs" event={"ID":"fa9ae998-069d-4264-a6d3-2a8f51373524","Type":"ContainerStarted","Data":"c8e61f66d6f403376205f93aedfe0ee73826f002c15e69a024c618f920b7ed96"} Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.635061 4644 generic.go:334] "Generic (PLEG): container finished" podID="2d3803d2-f0df-48c8-ba36-8a1cffdc262e" containerID="16bffb83fecd6ae0a5ff90730e795f5ec4470569589b7510f35693b5e0f33b0b" exitCode=0 Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.635088 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bk2fx" event={"ID":"2d3803d2-f0df-48c8-ba36-8a1cffdc262e","Type":"ContainerDied","Data":"16bffb83fecd6ae0a5ff90730e795f5ec4470569589b7510f35693b5e0f33b0b"} Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.667765 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2jp6k"] Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.670238 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2jp6k" Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.676402 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2jp6k"] Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.710981 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5bc56d6f79-jjz79" podUID="bc9ebb37-f21c-4029-a614-ef0203c99d34" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.119:5353: i/o timeout" Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.781696 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88fd6885-187f-443b-a294-88293678f36b-utilities\") pod \"community-operators-2jp6k\" (UID: \"88fd6885-187f-443b-a294-88293678f36b\") " pod="openshift-marketplace/community-operators-2jp6k" Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.782023 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88fd6885-187f-443b-a294-88293678f36b-catalog-content\") pod \"community-operators-2jp6k\" (UID: \"88fd6885-187f-443b-a294-88293678f36b\") " pod="openshift-marketplace/community-operators-2jp6k" Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.782193 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c48sl\" (UniqueName: \"kubernetes.io/projected/88fd6885-187f-443b-a294-88293678f36b-kube-api-access-c48sl\") pod \"community-operators-2jp6k\" (UID: \"88fd6885-187f-443b-a294-88293678f36b\") " pod="openshift-marketplace/community-operators-2jp6k" Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.883889 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88fd6885-187f-443b-a294-88293678f36b-utilities\") pod \"community-operators-2jp6k\" (UID: \"88fd6885-187f-443b-a294-88293678f36b\") " pod="openshift-marketplace/community-operators-2jp6k" Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.884113 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88fd6885-187f-443b-a294-88293678f36b-catalog-content\") pod \"community-operators-2jp6k\" (UID: \"88fd6885-187f-443b-a294-88293678f36b\") " pod="openshift-marketplace/community-operators-2jp6k" Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.884224 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c48sl\" (UniqueName: \"kubernetes.io/projected/88fd6885-187f-443b-a294-88293678f36b-kube-api-access-c48sl\") pod \"community-operators-2jp6k\" (UID: \"88fd6885-187f-443b-a294-88293678f36b\") " pod="openshift-marketplace/community-operators-2jp6k" Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.885078 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88fd6885-187f-443b-a294-88293678f36b-utilities\") pod \"community-operators-2jp6k\" (UID: \"88fd6885-187f-443b-a294-88293678f36b\") " pod="openshift-marketplace/community-operators-2jp6k" Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.885374 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/88fd6885-187f-443b-a294-88293678f36b-catalog-content\") pod \"community-operators-2jp6k\" (UID: \"88fd6885-187f-443b-a294-88293678f36b\") " pod="openshift-marketplace/community-operators-2jp6k" Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.902112 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c48sl\" (UniqueName: \"kubernetes.io/projected/88fd6885-187f-443b-a294-88293678f36b-kube-api-access-c48sl\") pod \"community-operators-2jp6k\" (UID: \"88fd6885-187f-443b-a294-88293678f36b\") " pod="openshift-marketplace/community-operators-2jp6k" Dec 13 07:02:04 crc kubenswrapper[4644]: I1213 07:02:04.987620 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2jp6k" Dec 13 07:02:05 crc kubenswrapper[4644]: I1213 07:02:05.643837 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c601fe2-d854-42ef-885a-e2acc45f1607","Type":"ContainerStarted","Data":"13f94c785a0f578c0adcd5bed3faa201a8196bdaa2565387c561700a05b3f917"} Dec 13 07:02:05 crc kubenswrapper[4644]: I1213 07:02:05.646383 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f69459849-4h2fh" event={"ID":"e0607341-6c2d-41db-8350-17f32d48aedc","Type":"ContainerStarted","Data":"659a265edd086110a058d36a2c66fad80a6f537ca32f3bb654b6d0287f308a9b"} Dec 13 07:02:05 crc kubenswrapper[4644]: I1213 07:02:05.646729 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:05 crc kubenswrapper[4644]: I1213 07:02:05.669181 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-589fc5f8d9-v292l" event={"ID":"b130b8ec-1bd6-4b3f-975e-82f6d903da76","Type":"ContainerStarted","Data":"c7af722fe94a8fb737d5657428c5b9755c085e23dea681804dacef1c806bf307"} Dec 13 07:02:05 crc kubenswrapper[4644]: I1213 07:02:05.669223 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-589fc5f8d9-v292l" event={"ID":"b130b8ec-1bd6-4b3f-975e-82f6d903da76","Type":"ContainerStarted","Data":"21a4abdbcb70708495522a5c605dc75e2f2ae9541937abfa12c1719c26ec4d10"} Dec 13 07:02:05 crc kubenswrapper[4644]: I1213 07:02:05.670056 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:05 crc kubenswrapper[4644]: I1213 07:02:05.693627 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7f69459849-4h2fh" podStartSLOduration=4.693606418 podStartE2EDuration="4.693606418s" podCreationTimestamp="2025-12-13 07:02:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:02:05.671293118 +0000 UTC m=+987.886243951" watchObservedRunningTime="2025-12-13 07:02:05.693606418 +0000 UTC m=+987.908557250" Dec 13 07:02:05 crc kubenswrapper[4644]: I1213 07:02:05.712052 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-668dddc65b-wlzwz" podStartSLOduration=20.097686754 podStartE2EDuration="20.712026505s" podCreationTimestamp="2025-12-13 07:01:45 +0000 UTC" firstStartedPulling="2025-12-13 07:02:01.865267358 +0000 UTC m=+984.080218191" lastFinishedPulling="2025-12-13 07:02:02.479607108 +0000 UTC m=+984.694557942" observedRunningTime="2025-12-13 07:02:05.708842678 +0000 UTC m=+987.923793511" watchObservedRunningTime="2025-12-13 
07:02:05.712026505 +0000 UTC m=+987.926977339" Dec 13 07:02:05 crc kubenswrapper[4644]: I1213 07:02:05.730532 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7dc5bbd594-tswgs" podStartSLOduration=20.73051437 podStartE2EDuration="20.73051437s" podCreationTimestamp="2025-12-13 07:01:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:02:05.729153421 +0000 UTC m=+987.944104253" watchObservedRunningTime="2025-12-13 07:02:05.73051437 +0000 UTC m=+987.945465193" Dec 13 07:02:05 crc kubenswrapper[4644]: I1213 07:02:05.750052 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2jp6k"] Dec 13 07:02:05 crc kubenswrapper[4644]: I1213 07:02:05.760614 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-85586f897b-4b47l" podStartSLOduration=4.7605902570000005 podStartE2EDuration="4.760590257s" podCreationTimestamp="2025-12-13 07:02:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:02:05.744715294 +0000 UTC m=+987.959666127" watchObservedRunningTime="2025-12-13 07:02:05.760590257 +0000 UTC m=+987.975541089" Dec 13 07:02:05 crc kubenswrapper[4644]: I1213 07:02:05.979666 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-bk2fx" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.112891 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-scripts\") pod \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.112948 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-combined-ca-bundle\") pod \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.113047 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-logs\") pod \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.113100 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-config-data\") pod \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.113220 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ggszd\" (UniqueName: \"kubernetes.io/projected/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-kube-api-access-ggszd\") pod \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\" (UID: \"2d3803d2-f0df-48c8-ba36-8a1cffdc262e\") " Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.114573 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-logs" (OuterVolumeSpecName: "logs") pod 
"2d3803d2-f0df-48c8-ba36-8a1cffdc262e" (UID: "2d3803d2-f0df-48c8-ba36-8a1cffdc262e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.119001 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-kube-api-access-ggszd" (OuterVolumeSpecName: "kube-api-access-ggszd") pod "2d3803d2-f0df-48c8-ba36-8a1cffdc262e" (UID: "2d3803d2-f0df-48c8-ba36-8a1cffdc262e"). InnerVolumeSpecName "kube-api-access-ggszd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.119049 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-scripts" (OuterVolumeSpecName: "scripts") pod "2d3803d2-f0df-48c8-ba36-8a1cffdc262e" (UID: "2d3803d2-f0df-48c8-ba36-8a1cffdc262e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.148639 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-config-data" (OuterVolumeSpecName: "config-data") pod "2d3803d2-f0df-48c8-ba36-8a1cffdc262e" (UID: "2d3803d2-f0df-48c8-ba36-8a1cffdc262e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.153811 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2d3803d2-f0df-48c8-ba36-8a1cffdc262e" (UID: "2d3803d2-f0df-48c8-ba36-8a1cffdc262e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.215219 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ggszd\" (UniqueName: \"kubernetes.io/projected/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-kube-api-access-ggszd\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.215508 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.215521 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.215529 4644 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-logs\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.215537 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d3803d2-f0df-48c8-ba36-8a1cffdc262e-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.688969 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-589fc5f8d9-v292l" event={"ID":"b130b8ec-1bd6-4b3f-975e-82f6d903da76","Type":"ContainerStarted","Data":"f59d6d0109332936fabb6982cb050d886c9c1de89b095d87678e7f2a54d7694f"} Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.689541 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.694364 4644 generic.go:334] "Generic (PLEG): container finished" podID="88fd6885-187f-443b-a294-88293678f36b" containerID="6b8f090c975bd3dc55ef3f3f5bd006743d60d3875d724f3cde0777c271ee5826" exitCode=0 Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.694424 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2jp6k" event={"ID":"88fd6885-187f-443b-a294-88293678f36b","Type":"ContainerDied","Data":"6b8f090c975bd3dc55ef3f3f5bd006743d60d3875d724f3cde0777c271ee5826"} Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.694464 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2jp6k" event={"ID":"88fd6885-187f-443b-a294-88293678f36b","Type":"ContainerStarted","Data":"9c6a7ad9067a2837bf5d06964ff57a419c9c9ea3f437b002358f3c1d2d8b81f1"} Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.696681 4644 generic.go:334] "Generic (PLEG): container finished" podID="02c6c85f-c08e-4317-b3fc-35689c19bade" containerID="eb29f4b138a392e8437ec9668608bb64c52366e4b34b2e5b07bbacc1e0f9ca25" exitCode=0 Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.696749 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gp49b" event={"ID":"02c6c85f-c08e-4317-b3fc-35689c19bade","Type":"ContainerDied","Data":"eb29f4b138a392e8437ec9668608bb64c52366e4b34b2e5b07bbacc1e0f9ca25"} Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.710954 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-589fc5f8d9-v292l" podStartSLOduration=3.71093815 
podStartE2EDuration="3.71093815s" podCreationTimestamp="2025-12-13 07:02:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:02:06.707063865 +0000 UTC m=+988.922014698" watchObservedRunningTime="2025-12-13 07:02:06.71093815 +0000 UTC m=+988.925888983" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.711311 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bk2fx" event={"ID":"2d3803d2-f0df-48c8-ba36-8a1cffdc262e","Type":"ContainerDied","Data":"161d77c47902e56c758c087c45ff72afbc5ed6042ccb504a446028b38cefb20a"} Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.711353 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="161d77c47902e56c758c087c45ff72afbc5ed6042ccb504a446028b38cefb20a" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.711319 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-bk2fx" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.730655 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-skqjj" event={"ID":"c3526efa-a3f6-4ec1-8273-e302279281ba","Type":"ContainerStarted","Data":"9dd6f9d64c68c9cae5c91f7cab905bad7206402b8ba8701ead257605cd19faff"} Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.735327 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-8f5479c4d-vcv6r"] Dec 13 07:02:06 crc kubenswrapper[4644]: E1213 07:02:06.735768 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d3803d2-f0df-48c8-ba36-8a1cffdc262e" containerName="placement-db-sync" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.735786 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d3803d2-f0df-48c8-ba36-8a1cffdc262e" containerName="placement-db-sync" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.735932 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d3803d2-f0df-48c8-ba36-8a1cffdc262e" containerName="placement-db-sync" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.736792 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.741705 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.741763 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-nfkkq" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.741853 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.741946 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.751485 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.756362 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8f5479c4d-vcv6r"] Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.821217 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-skqjj" podStartSLOduration=18.923822119 podStartE2EDuration="22.821201033s" podCreationTimestamp="2025-12-13 07:01:44 +0000 UTC" firstStartedPulling="2025-12-13 07:02:02.59454964 +0000 UTC m=+984.809500474" lastFinishedPulling="2025-12-13 07:02:06.491928555 +0000 UTC m=+988.706879388" observedRunningTime="2025-12-13 07:02:06.812554454 +0000 UTC m=+989.027505276" watchObservedRunningTime="2025-12-13 07:02:06.821201033 +0000 UTC m=+989.036151866" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.826019 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fj7n8\" (UniqueName: \"kubernetes.io/projected/ccdc6627-1b86-41d6-993f-a2c0e641b81c-kube-api-access-fj7n8\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.826252 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccdc6627-1b86-41d6-993f-a2c0e641b81c-public-tls-certs\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.826325 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccdc6627-1b86-41d6-993f-a2c0e641b81c-internal-tls-certs\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.826429 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ccdc6627-1b86-41d6-993f-a2c0e641b81c-logs\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.826517 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ccdc6627-1b86-41d6-993f-a2c0e641b81c-combined-ca-bundle\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.826630 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccdc6627-1b86-41d6-993f-a2c0e641b81c-config-data\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.826752 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccdc6627-1b86-41d6-993f-a2c0e641b81c-scripts\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.928228 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccdc6627-1b86-41d6-993f-a2c0e641b81c-combined-ca-bundle\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.928347 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccdc6627-1b86-41d6-993f-a2c0e641b81c-config-data\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.928422 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccdc6627-1b86-41d6-993f-a2c0e641b81c-scripts\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.928513 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fj7n8\" (UniqueName: \"kubernetes.io/projected/ccdc6627-1b86-41d6-993f-a2c0e641b81c-kube-api-access-fj7n8\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.929039 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccdc6627-1b86-41d6-993f-a2c0e641b81c-public-tls-certs\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.929090 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccdc6627-1b86-41d6-993f-a2c0e641b81c-internal-tls-certs\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.929151 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ccdc6627-1b86-41d6-993f-a2c0e641b81c-logs\") pod \"placement-8f5479c4d-vcv6r\" 
(UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.929590 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ccdc6627-1b86-41d6-993f-a2c0e641b81c-logs\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.934054 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccdc6627-1b86-41d6-993f-a2c0e641b81c-public-tls-certs\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.934903 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccdc6627-1b86-41d6-993f-a2c0e641b81c-combined-ca-bundle\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.936483 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccdc6627-1b86-41d6-993f-a2c0e641b81c-config-data\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.936858 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccdc6627-1b86-41d6-993f-a2c0e641b81c-internal-tls-certs\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.947381 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccdc6627-1b86-41d6-993f-a2c0e641b81c-scripts\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:06 crc kubenswrapper[4644]: I1213 07:02:06.948643 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fj7n8\" (UniqueName: \"kubernetes.io/projected/ccdc6627-1b86-41d6-993f-a2c0e641b81c-kube-api-access-fj7n8\") pod \"placement-8f5479c4d-vcv6r\" (UID: \"ccdc6627-1b86-41d6-993f-a2c0e641b81c\") " pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:07 crc kubenswrapper[4644]: I1213 07:02:07.053572 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:07 crc kubenswrapper[4644]: I1213 07:02:07.537036 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8f5479c4d-vcv6r"] Dec 13 07:02:07 crc kubenswrapper[4644]: I1213 07:02:07.759971 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2jp6k" event={"ID":"88fd6885-187f-443b-a294-88293678f36b","Type":"ContainerStarted","Data":"07fcd86043563bb5f1032190351410a21b57b98f2182e73522607e2ab9568609"} Dec 13 07:02:07 crc kubenswrapper[4644]: I1213 07:02:07.763564 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8f5479c4d-vcv6r" event={"ID":"ccdc6627-1b86-41d6-993f-a2c0e641b81c","Type":"ContainerStarted","Data":"1ed5fefd5182f58f5647a9e0c7922259df7f91983a1cc5f75c02e9c1e4e3966e"} Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.158799 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.260002 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-fernet-keys\") pod \"02c6c85f-c08e-4317-b3fc-35689c19bade\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.260049 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fs6n4\" (UniqueName: \"kubernetes.io/projected/02c6c85f-c08e-4317-b3fc-35689c19bade-kube-api-access-fs6n4\") pod \"02c6c85f-c08e-4317-b3fc-35689c19bade\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.260104 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-scripts\") pod \"02c6c85f-c08e-4317-b3fc-35689c19bade\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.260156 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-config-data\") pod \"02c6c85f-c08e-4317-b3fc-35689c19bade\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.260183 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-combined-ca-bundle\") pod \"02c6c85f-c08e-4317-b3fc-35689c19bade\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.260224 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-credential-keys\") pod \"02c6c85f-c08e-4317-b3fc-35689c19bade\" (UID: \"02c6c85f-c08e-4317-b3fc-35689c19bade\") " Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.264040 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02c6c85f-c08e-4317-b3fc-35689c19bade-kube-api-access-fs6n4" (OuterVolumeSpecName: "kube-api-access-fs6n4") pod "02c6c85f-c08e-4317-b3fc-35689c19bade" (UID: "02c6c85f-c08e-4317-b3fc-35689c19bade"). 
InnerVolumeSpecName "kube-api-access-fs6n4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.265085 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-scripts" (OuterVolumeSpecName: "scripts") pod "02c6c85f-c08e-4317-b3fc-35689c19bade" (UID: "02c6c85f-c08e-4317-b3fc-35689c19bade"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.272679 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "02c6c85f-c08e-4317-b3fc-35689c19bade" (UID: "02c6c85f-c08e-4317-b3fc-35689c19bade"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.273198 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "02c6c85f-c08e-4317-b3fc-35689c19bade" (UID: "02c6c85f-c08e-4317-b3fc-35689c19bade"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.286720 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-config-data" (OuterVolumeSpecName: "config-data") pod "02c6c85f-c08e-4317-b3fc-35689c19bade" (UID: "02c6c85f-c08e-4317-b3fc-35689c19bade"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.300659 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02c6c85f-c08e-4317-b3fc-35689c19bade" (UID: "02c6c85f-c08e-4317-b3fc-35689c19bade"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.362419 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fs6n4\" (UniqueName: \"kubernetes.io/projected/02c6c85f-c08e-4317-b3fc-35689c19bade-kube-api-access-fs6n4\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.362467 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.362476 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.362484 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.362493 4644 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.362500 4644 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/02c6c85f-c08e-4317-b3fc-35689c19bade-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.781869 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gp49b" event={"ID":"02c6c85f-c08e-4317-b3fc-35689c19bade","Type":"ContainerDied","Data":"ab4b7a35110a61f32ba020a84d7e1d7624eb02a55e9ad1e39845d215bbbcb828"} Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.781909 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab4b7a35110a61f32ba020a84d7e1d7624eb02a55e9ad1e39845d215bbbcb828" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.781971 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-gp49b" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.786407 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8f5479c4d-vcv6r" event={"ID":"ccdc6627-1b86-41d6-993f-a2c0e641b81c","Type":"ContainerStarted","Data":"b6c731b96952d87d33d2ff6d268ae448d70b993e0e1dfe740934d3b76deb75bb"} Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.786458 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8f5479c4d-vcv6r" event={"ID":"ccdc6627-1b86-41d6-993f-a2c0e641b81c","Type":"ContainerStarted","Data":"223c06d06f71310115452b24360e50ed01df4eb0c4f3a368053af15e13571722"} Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.787346 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.787392 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.790169 4644 generic.go:334] "Generic (PLEG): container finished" podID="88fd6885-187f-443b-a294-88293678f36b" containerID="07fcd86043563bb5f1032190351410a21b57b98f2182e73522607e2ab9568609" exitCode=0 Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.790209 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2jp6k" event={"ID":"88fd6885-187f-443b-a294-88293678f36b","Type":"ContainerDied","Data":"07fcd86043563bb5f1032190351410a21b57b98f2182e73522607e2ab9568609"} Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.813690 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-84867c6db9-t2k7k"] Dec 13 07:02:08 crc kubenswrapper[4644]: E1213 07:02:08.814276 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02c6c85f-c08e-4317-b3fc-35689c19bade" containerName="keystone-bootstrap" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.814389 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="02c6c85f-c08e-4317-b3fc-35689c19bade" containerName="keystone-bootstrap" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.814704 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="02c6c85f-c08e-4317-b3fc-35689c19bade" containerName="keystone-bootstrap" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.815300 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.816247 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-8f5479c4d-vcv6r" podStartSLOduration=2.816235208 podStartE2EDuration="2.816235208s" podCreationTimestamp="2025-12-13 07:02:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:02:08.810426947 +0000 UTC m=+991.025377780" watchObservedRunningTime="2025-12-13 07:02:08.816235208 +0000 UTC m=+991.031186041" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.820516 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.820608 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.820666 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.820765 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.820801 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-5hlkk" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.820954 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.838706 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-84867c6db9-t2k7k"] Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.997430 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-public-tls-certs\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.997751 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-combined-ca-bundle\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.997794 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-internal-tls-certs\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.997810 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-config-data\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.997846 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-scripts\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.997979 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-fernet-keys\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.998067 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-credential-keys\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:08 crc kubenswrapper[4644]: I1213 07:02:08.998316 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk66r\" (UniqueName: \"kubernetes.io/projected/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-kube-api-access-lk66r\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:09 crc kubenswrapper[4644]: I1213 07:02:09.099529 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-public-tls-certs\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:09 crc kubenswrapper[4644]: I1213 07:02:09.099587 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-combined-ca-bundle\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:09 crc kubenswrapper[4644]: I1213 07:02:09.099618 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-config-data\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:09 crc kubenswrapper[4644]: I1213 07:02:09.099635 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-internal-tls-certs\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:09 crc kubenswrapper[4644]: I1213 07:02:09.099669 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-scripts\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:09 crc kubenswrapper[4644]: I1213 07:02:09.100044 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-fernet-keys\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:09 crc kubenswrapper[4644]: I1213 07:02:09.100112 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-credential-keys\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:09 crc kubenswrapper[4644]: I1213 07:02:09.100197 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk66r\" (UniqueName: \"kubernetes.io/projected/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-kube-api-access-lk66r\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:09 crc kubenswrapper[4644]: I1213 07:02:09.104597 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-public-tls-certs\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:09 crc kubenswrapper[4644]: I1213 07:02:09.107484 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-internal-tls-certs\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:09 crc kubenswrapper[4644]: I1213 07:02:09.107612 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-combined-ca-bundle\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:09 crc kubenswrapper[4644]: I1213 07:02:09.108300 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-scripts\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:09 crc kubenswrapper[4644]: I1213 07:02:09.108956 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-config-data\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:09 crc kubenswrapper[4644]: I1213 07:02:09.113922 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-credential-keys\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:09 crc kubenswrapper[4644]: I1213 07:02:09.114651 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-fernet-keys\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " 
pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:09 crc kubenswrapper[4644]: I1213 07:02:09.127462 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk66r\" (UniqueName: \"kubernetes.io/projected/17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa-kube-api-access-lk66r\") pod \"keystone-84867c6db9-t2k7k\" (UID: \"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa\") " pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:09 crc kubenswrapper[4644]: I1213 07:02:09.146488 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:11 crc kubenswrapper[4644]: I1213 07:02:11.408618 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:11 crc kubenswrapper[4644]: I1213 07:02:11.457249 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-694dbb6647-8bdf9"] Dec 13 07:02:11 crc kubenswrapper[4644]: I1213 07:02:11.457464 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" podUID="d1a8f072-5116-43bc-9280-288e5eae7827" containerName="dnsmasq-dns" containerID="cri-o://e416575821fc7ad7bddf27fe7c2d22cb3be5ea26a8834e1f70c9aa612967efeb" gracePeriod=10 Dec 13 07:02:11 crc kubenswrapper[4644]: I1213 07:02:11.775137 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-84867c6db9-t2k7k"] Dec 13 07:02:11 crc kubenswrapper[4644]: W1213 07:02:11.780670 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17cae40f_e78c_4b8b_8e5b_3cf6548c1cfa.slice/crio-71bf3ffd5f4facbea3bb97720ec4ecc704ceec5bc656a1f7e0b795e1a3089e9f WatchSource:0}: Error finding container 71bf3ffd5f4facbea3bb97720ec4ecc704ceec5bc656a1f7e0b795e1a3089e9f: Status 404 returned error can't find the container with id 71bf3ffd5f4facbea3bb97720ec4ecc704ceec5bc656a1f7e0b795e1a3089e9f Dec 13 07:02:11 crc kubenswrapper[4644]: I1213 07:02:11.826221 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2jp6k" event={"ID":"88fd6885-187f-443b-a294-88293678f36b","Type":"ContainerStarted","Data":"7ef975efa5375b23cfde12b1affb04e48f0398ce3c0ed23ae7519218b7b7ff8f"} Dec 13 07:02:11 crc kubenswrapper[4644]: I1213 07:02:11.831053 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c601fe2-d854-42ef-885a-e2acc45f1607","Type":"ContainerStarted","Data":"5dc7e6e965f8163a120c6a2e67db178392a9c63937bfc8bb91a45883e8f14a0e"} Dec 13 07:02:11 crc kubenswrapper[4644]: I1213 07:02:11.832228 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-84867c6db9-t2k7k" event={"ID":"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa","Type":"ContainerStarted","Data":"71bf3ffd5f4facbea3bb97720ec4ecc704ceec5bc656a1f7e0b795e1a3089e9f"} Dec 13 07:02:11 crc kubenswrapper[4644]: I1213 07:02:11.839082 4644 generic.go:334] "Generic (PLEG): container finished" podID="d1a8f072-5116-43bc-9280-288e5eae7827" containerID="e416575821fc7ad7bddf27fe7c2d22cb3be5ea26a8834e1f70c9aa612967efeb" exitCode=0 Dec 13 07:02:11 crc kubenswrapper[4644]: I1213 07:02:11.839112 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" event={"ID":"d1a8f072-5116-43bc-9280-288e5eae7827","Type":"ContainerDied","Data":"e416575821fc7ad7bddf27fe7c2d22cb3be5ea26a8834e1f70c9aa612967efeb"} Dec 13 07:02:11 crc 
kubenswrapper[4644]: I1213 07:02:11.861215 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:02:11 crc kubenswrapper[4644]: I1213 07:02:11.880411 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2jp6k" podStartSLOduration=3.249884079 podStartE2EDuration="7.880394913s" podCreationTimestamp="2025-12-13 07:02:04 +0000 UTC" firstStartedPulling="2025-12-13 07:02:06.695851781 +0000 UTC m=+988.910802605" lastFinishedPulling="2025-12-13 07:02:11.326362596 +0000 UTC m=+993.541313439" observedRunningTime="2025-12-13 07:02:11.849919295 +0000 UTC m=+994.064870128" watchObservedRunningTime="2025-12-13 07:02:11.880394913 +0000 UTC m=+994.095345745" Dec 13 07:02:11 crc kubenswrapper[4644]: I1213 07:02:11.953145 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-config\") pod \"d1a8f072-5116-43bc-9280-288e5eae7827\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " Dec 13 07:02:11 crc kubenswrapper[4644]: I1213 07:02:11.953207 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-ovsdbserver-nb\") pod \"d1a8f072-5116-43bc-9280-288e5eae7827\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " Dec 13 07:02:11 crc kubenswrapper[4644]: I1213 07:02:11.953235 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-dns-svc\") pod \"d1a8f072-5116-43bc-9280-288e5eae7827\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " Dec 13 07:02:11 crc kubenswrapper[4644]: I1213 07:02:11.953319 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wf9m4\" (UniqueName: \"kubernetes.io/projected/d1a8f072-5116-43bc-9280-288e5eae7827-kube-api-access-wf9m4\") pod \"d1a8f072-5116-43bc-9280-288e5eae7827\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " Dec 13 07:02:11 crc kubenswrapper[4644]: I1213 07:02:11.953344 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-ovsdbserver-sb\") pod \"d1a8f072-5116-43bc-9280-288e5eae7827\" (UID: \"d1a8f072-5116-43bc-9280-288e5eae7827\") " Dec 13 07:02:11 crc kubenswrapper[4644]: I1213 07:02:11.965180 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1a8f072-5116-43bc-9280-288e5eae7827-kube-api-access-wf9m4" (OuterVolumeSpecName: "kube-api-access-wf9m4") pod "d1a8f072-5116-43bc-9280-288e5eae7827" (UID: "d1a8f072-5116-43bc-9280-288e5eae7827"). InnerVolumeSpecName "kube-api-access-wf9m4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.014704 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d1a8f072-5116-43bc-9280-288e5eae7827" (UID: "d1a8f072-5116-43bc-9280-288e5eae7827"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.016572 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d1a8f072-5116-43bc-9280-288e5eae7827" (UID: "d1a8f072-5116-43bc-9280-288e5eae7827"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.019176 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-config" (OuterVolumeSpecName: "config") pod "d1a8f072-5116-43bc-9280-288e5eae7827" (UID: "d1a8f072-5116-43bc-9280-288e5eae7827"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.024646 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d1a8f072-5116-43bc-9280-288e5eae7827" (UID: "d1a8f072-5116-43bc-9280-288e5eae7827"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.055576 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.055601 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.055611 4644 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.055621 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wf9m4\" (UniqueName: \"kubernetes.io/projected/d1a8f072-5116-43bc-9280-288e5eae7827-kube-api-access-wf9m4\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.055630 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d1a8f072-5116-43bc-9280-288e5eae7827-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.845919 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-84867c6db9-t2k7k" event={"ID":"17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa","Type":"ContainerStarted","Data":"dc743aa55dd7175f95e7243d95b0895bc307a9fffc1c9544f56b98df5c77b51a"} Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.847686 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.848981 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.849103 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-694dbb6647-8bdf9" event={"ID":"d1a8f072-5116-43bc-9280-288e5eae7827","Type":"ContainerDied","Data":"b5a619ca63388210ee9f085a035fc585cd4dd4ebb4a82f7bc67ca106b750cfe5"} Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.849167 4644 scope.go:117] "RemoveContainer" containerID="e416575821fc7ad7bddf27fe7c2d22cb3be5ea26a8834e1f70c9aa612967efeb" Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.870039 4644 scope.go:117] "RemoveContainer" containerID="4a8cef6bdbe085f7bd133b5fdda36436f49c4b0bde5b61b7819d09072a8b4013" Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.872401 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-84867c6db9-t2k7k" podStartSLOduration=4.872382698 podStartE2EDuration="4.872382698s" podCreationTimestamp="2025-12-13 07:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:02:12.867601126 +0000 UTC m=+995.082551960" watchObservedRunningTime="2025-12-13 07:02:12.872382698 +0000 UTC m=+995.087333531" Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.901490 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-694dbb6647-8bdf9"] Dec 13 07:02:12 crc kubenswrapper[4644]: I1213 07:02:12.913573 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-694dbb6647-8bdf9"] Dec 13 07:02:13 crc kubenswrapper[4644]: I1213 07:02:13.869148 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-fdgxf" event={"ID":"bf873768-3def-4a7c-b48b-fb3749f8c927","Type":"ContainerStarted","Data":"9207ac8bfea191ebadb1a4cf6b5338286a7fec14c62b9337f1b91d5a4bf571ea"} Dec 13 07:02:13 crc kubenswrapper[4644]: I1213 07:02:13.888383 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-fdgxf" podStartSLOduration=3.022025247 podStartE2EDuration="37.888366066s" podCreationTimestamp="2025-12-13 07:01:36 +0000 UTC" firstStartedPulling="2025-12-13 07:01:38.219317631 +0000 UTC m=+960.434268464" lastFinishedPulling="2025-12-13 07:02:13.08565845 +0000 UTC m=+995.300609283" observedRunningTime="2025-12-13 07:02:13.881676225 +0000 UTC m=+996.096627059" watchObservedRunningTime="2025-12-13 07:02:13.888366066 +0000 UTC m=+996.103316898" Dec 13 07:02:14 crc kubenswrapper[4644]: I1213 07:02:14.398154 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1a8f072-5116-43bc-9280-288e5eae7827" path="/var/lib/kubelet/pods/d1a8f072-5116-43bc-9280-288e5eae7827/volumes" Dec 13 07:02:14 crc kubenswrapper[4644]: I1213 07:02:14.444329 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-skqjj" Dec 13 07:02:14 crc kubenswrapper[4644]: I1213 07:02:14.444620 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-skqjj" Dec 13 07:02:14 crc kubenswrapper[4644]: I1213 07:02:14.478285 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-skqjj" Dec 13 07:02:14 crc kubenswrapper[4644]: I1213 07:02:14.908437 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/certified-operators-skqjj" Dec 13 07:02:14 crc kubenswrapper[4644]: I1213 07:02:14.947254 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-skqjj"] Dec 13 07:02:14 crc kubenswrapper[4644]: I1213 07:02:14.988643 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2jp6k" Dec 13 07:02:14 crc kubenswrapper[4644]: I1213 07:02:14.988689 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2jp6k" Dec 13 07:02:15 crc kubenswrapper[4644]: I1213 07:02:15.022931 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2jp6k" Dec 13 07:02:15 crc kubenswrapper[4644]: I1213 07:02:15.483185 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:02:15 crc kubenswrapper[4644]: I1213 07:02:15.483286 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:02:15 crc kubenswrapper[4644]: I1213 07:02:15.484501 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7dc5bbd594-tswgs" podUID="fa9ae998-069d-4264-a6d3-2a8f51373524" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.139:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.139:8443: connect: connection refused" Dec 13 07:02:15 crc kubenswrapper[4644]: I1213 07:02:15.577411 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:02:15 crc kubenswrapper[4644]: I1213 07:02:15.578738 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:02:15 crc kubenswrapper[4644]: I1213 07:02:15.580152 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-668dddc65b-wlzwz" podUID="b7d37a2f-8117-4d49-8e28-f06339a276cf" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.140:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.140:8443: connect: connection refused" Dec 13 07:02:15 crc kubenswrapper[4644]: I1213 07:02:15.886568 4644 generic.go:334] "Generic (PLEG): container finished" podID="bf873768-3def-4a7c-b48b-fb3749f8c927" containerID="9207ac8bfea191ebadb1a4cf6b5338286a7fec14c62b9337f1b91d5a4bf571ea" exitCode=0 Dec 13 07:02:15 crc kubenswrapper[4644]: I1213 07:02:15.886653 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-fdgxf" event={"ID":"bf873768-3def-4a7c-b48b-fb3749f8c927","Type":"ContainerDied","Data":"9207ac8bfea191ebadb1a4cf6b5338286a7fec14c62b9337f1b91d5a4bf571ea"} Dec 13 07:02:16 crc kubenswrapper[4644]: I1213 07:02:16.897545 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-skqjj" podUID="c3526efa-a3f6-4ec1-8273-e302279281ba" containerName="registry-server" containerID="cri-o://9dd6f9d64c68c9cae5c91f7cab905bad7206402b8ba8701ead257605cd19faff" gracePeriod=2 Dec 13 07:02:17 crc kubenswrapper[4644]: I1213 07:02:17.817140 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-skqjj" Dec 13 07:02:17 crc kubenswrapper[4644]: I1213 07:02:17.907802 4644 generic.go:334] "Generic (PLEG): container finished" podID="c3526efa-a3f6-4ec1-8273-e302279281ba" containerID="9dd6f9d64c68c9cae5c91f7cab905bad7206402b8ba8701ead257605cd19faff" exitCode=0 Dec 13 07:02:17 crc kubenswrapper[4644]: I1213 07:02:17.907852 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-skqjj" Dec 13 07:02:17 crc kubenswrapper[4644]: I1213 07:02:17.907852 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-skqjj" event={"ID":"c3526efa-a3f6-4ec1-8273-e302279281ba","Type":"ContainerDied","Data":"9dd6f9d64c68c9cae5c91f7cab905bad7206402b8ba8701ead257605cd19faff"} Dec 13 07:02:17 crc kubenswrapper[4644]: I1213 07:02:17.907944 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-skqjj" event={"ID":"c3526efa-a3f6-4ec1-8273-e302279281ba","Type":"ContainerDied","Data":"1d09fd80e3140270ac01b34449c6f39a0b46bed0db4ec3a0aa6d34f17ed37697"} Dec 13 07:02:17 crc kubenswrapper[4644]: I1213 07:02:17.907970 4644 scope.go:117] "RemoveContainer" containerID="9dd6f9d64c68c9cae5c91f7cab905bad7206402b8ba8701ead257605cd19faff" Dec 13 07:02:17 crc kubenswrapper[4644]: I1213 07:02:17.964948 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3526efa-a3f6-4ec1-8273-e302279281ba-utilities\") pod \"c3526efa-a3f6-4ec1-8273-e302279281ba\" (UID: \"c3526efa-a3f6-4ec1-8273-e302279281ba\") " Dec 13 07:02:17 crc kubenswrapper[4644]: I1213 07:02:17.965015 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z66nv\" (UniqueName: \"kubernetes.io/projected/c3526efa-a3f6-4ec1-8273-e302279281ba-kube-api-access-z66nv\") pod \"c3526efa-a3f6-4ec1-8273-e302279281ba\" (UID: \"c3526efa-a3f6-4ec1-8273-e302279281ba\") " Dec 13 07:02:17 crc kubenswrapper[4644]: I1213 07:02:17.965040 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3526efa-a3f6-4ec1-8273-e302279281ba-catalog-content\") pod \"c3526efa-a3f6-4ec1-8273-e302279281ba\" (UID: \"c3526efa-a3f6-4ec1-8273-e302279281ba\") " Dec 13 07:02:17 crc kubenswrapper[4644]: I1213 07:02:17.965702 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3526efa-a3f6-4ec1-8273-e302279281ba-utilities" (OuterVolumeSpecName: "utilities") pod "c3526efa-a3f6-4ec1-8273-e302279281ba" (UID: "c3526efa-a3f6-4ec1-8273-e302279281ba"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:02:17 crc kubenswrapper[4644]: I1213 07:02:17.983238 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3526efa-a3f6-4ec1-8273-e302279281ba-kube-api-access-z66nv" (OuterVolumeSpecName: "kube-api-access-z66nv") pod "c3526efa-a3f6-4ec1-8273-e302279281ba" (UID: "c3526efa-a3f6-4ec1-8273-e302279281ba"). InnerVolumeSpecName "kube-api-access-z66nv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.014278 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3526efa-a3f6-4ec1-8273-e302279281ba-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c3526efa-a3f6-4ec1-8273-e302279281ba" (UID: "c3526efa-a3f6-4ec1-8273-e302279281ba"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.067513 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3526efa-a3f6-4ec1-8273-e302279281ba-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.067547 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z66nv\" (UniqueName: \"kubernetes.io/projected/c3526efa-a3f6-4ec1-8273-e302279281ba-kube-api-access-z66nv\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.067559 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3526efa-a3f6-4ec1-8273-e302279281ba-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.083469 4644 scope.go:117] "RemoveContainer" containerID="b514ffc45e3312c2d78e3842ac1360d5addb687cd4bb46397ccc6524c8cef6a9" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.135562 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.147855 4644 scope.go:117] "RemoveContainer" containerID="ada423bf6ef88f9b75a74f07c40ed8cc8886ecc8051d0b1bb8fd9a931ef49917" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.177272 4644 scope.go:117] "RemoveContainer" containerID="9dd6f9d64c68c9cae5c91f7cab905bad7206402b8ba8701ead257605cd19faff" Dec 13 07:02:18 crc kubenswrapper[4644]: E1213 07:02:18.177580 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9dd6f9d64c68c9cae5c91f7cab905bad7206402b8ba8701ead257605cd19faff\": container with ID starting with 9dd6f9d64c68c9cae5c91f7cab905bad7206402b8ba8701ead257605cd19faff not found: ID does not exist" containerID="9dd6f9d64c68c9cae5c91f7cab905bad7206402b8ba8701ead257605cd19faff" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.177611 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dd6f9d64c68c9cae5c91f7cab905bad7206402b8ba8701ead257605cd19faff"} err="failed to get container status \"9dd6f9d64c68c9cae5c91f7cab905bad7206402b8ba8701ead257605cd19faff\": rpc error: code = NotFound desc = could not find container \"9dd6f9d64c68c9cae5c91f7cab905bad7206402b8ba8701ead257605cd19faff\": container with ID starting with 9dd6f9d64c68c9cae5c91f7cab905bad7206402b8ba8701ead257605cd19faff not found: ID does not exist" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.177630 4644 scope.go:117] "RemoveContainer" containerID="b514ffc45e3312c2d78e3842ac1360d5addb687cd4bb46397ccc6524c8cef6a9" Dec 13 07:02:18 crc kubenswrapper[4644]: E1213 07:02:18.177879 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b514ffc45e3312c2d78e3842ac1360d5addb687cd4bb46397ccc6524c8cef6a9\": container with ID starting 
with b514ffc45e3312c2d78e3842ac1360d5addb687cd4bb46397ccc6524c8cef6a9 not found: ID does not exist" containerID="b514ffc45e3312c2d78e3842ac1360d5addb687cd4bb46397ccc6524c8cef6a9" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.177904 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b514ffc45e3312c2d78e3842ac1360d5addb687cd4bb46397ccc6524c8cef6a9"} err="failed to get container status \"b514ffc45e3312c2d78e3842ac1360d5addb687cd4bb46397ccc6524c8cef6a9\": rpc error: code = NotFound desc = could not find container \"b514ffc45e3312c2d78e3842ac1360d5addb687cd4bb46397ccc6524c8cef6a9\": container with ID starting with b514ffc45e3312c2d78e3842ac1360d5addb687cd4bb46397ccc6524c8cef6a9 not found: ID does not exist" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.177920 4644 scope.go:117] "RemoveContainer" containerID="ada423bf6ef88f9b75a74f07c40ed8cc8886ecc8051d0b1bb8fd9a931ef49917" Dec 13 07:02:18 crc kubenswrapper[4644]: E1213 07:02:18.178192 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ada423bf6ef88f9b75a74f07c40ed8cc8886ecc8051d0b1bb8fd9a931ef49917\": container with ID starting with ada423bf6ef88f9b75a74f07c40ed8cc8886ecc8051d0b1bb8fd9a931ef49917 not found: ID does not exist" containerID="ada423bf6ef88f9b75a74f07c40ed8cc8886ecc8051d0b1bb8fd9a931ef49917" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.178208 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ada423bf6ef88f9b75a74f07c40ed8cc8886ecc8051d0b1bb8fd9a931ef49917"} err="failed to get container status \"ada423bf6ef88f9b75a74f07c40ed8cc8886ecc8051d0b1bb8fd9a931ef49917\": rpc error: code = NotFound desc = could not find container \"ada423bf6ef88f9b75a74f07c40ed8cc8886ecc8051d0b1bb8fd9a931ef49917\": container with ID starting with ada423bf6ef88f9b75a74f07c40ed8cc8886ecc8051d0b1bb8fd9a931ef49917 not found: ID does not exist" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.241837 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-skqjj"] Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.247706 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-skqjj"] Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.271156 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf873768-3def-4a7c-b48b-fb3749f8c927-etc-machine-id\") pod \"bf873768-3def-4a7c-b48b-fb3749f8c927\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.271232 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-combined-ca-bundle\") pod \"bf873768-3def-4a7c-b48b-fb3749f8c927\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.271356 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gg5pv\" (UniqueName: \"kubernetes.io/projected/bf873768-3def-4a7c-b48b-fb3749f8c927-kube-api-access-gg5pv\") pod \"bf873768-3def-4a7c-b48b-fb3749f8c927\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.271391 4644 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-config-data\") pod \"bf873768-3def-4a7c-b48b-fb3749f8c927\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.271484 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-db-sync-config-data\") pod \"bf873768-3def-4a7c-b48b-fb3749f8c927\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.271523 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-scripts\") pod \"bf873768-3def-4a7c-b48b-fb3749f8c927\" (UID: \"bf873768-3def-4a7c-b48b-fb3749f8c927\") " Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.272340 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bf873768-3def-4a7c-b48b-fb3749f8c927-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "bf873768-3def-4a7c-b48b-fb3749f8c927" (UID: "bf873768-3def-4a7c-b48b-fb3749f8c927"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.275070 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "bf873768-3def-4a7c-b48b-fb3749f8c927" (UID: "bf873768-3def-4a7c-b48b-fb3749f8c927"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.275169 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf873768-3def-4a7c-b48b-fb3749f8c927-kube-api-access-gg5pv" (OuterVolumeSpecName: "kube-api-access-gg5pv") pod "bf873768-3def-4a7c-b48b-fb3749f8c927" (UID: "bf873768-3def-4a7c-b48b-fb3749f8c927"). InnerVolumeSpecName "kube-api-access-gg5pv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.275294 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-scripts" (OuterVolumeSpecName: "scripts") pod "bf873768-3def-4a7c-b48b-fb3749f8c927" (UID: "bf873768-3def-4a7c-b48b-fb3749f8c927"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.289503 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bf873768-3def-4a7c-b48b-fb3749f8c927" (UID: "bf873768-3def-4a7c-b48b-fb3749f8c927"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.312796 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-config-data" (OuterVolumeSpecName: "config-data") pod "bf873768-3def-4a7c-b48b-fb3749f8c927" (UID: "bf873768-3def-4a7c-b48b-fb3749f8c927"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.373887 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gg5pv\" (UniqueName: \"kubernetes.io/projected/bf873768-3def-4a7c-b48b-fb3749f8c927-kube-api-access-gg5pv\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.373912 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.373922 4644 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.373931 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.373939 4644 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf873768-3def-4a7c-b48b-fb3749f8c927-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.373946 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf873768-3def-4a7c-b48b-fb3749f8c927-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.397037 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3526efa-a3f6-4ec1-8273-e302279281ba" path="/var/lib/kubelet/pods/c3526efa-a3f6-4ec1-8273-e302279281ba/volumes" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.915433 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c601fe2-d854-42ef-885a-e2acc45f1607","Type":"ContainerStarted","Data":"09150e69b995ddb2d86b708dbf2807be2b8d4915c4a8d52d4df4f14e1430ef47"} Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.916423 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerName="ceilometer-central-agent" containerID="cri-o://4c06db95a2033fa4cd71eb0c60b0ba4f56d460b9fb5c4a9bf9864f402e8bff41" gracePeriod=30 Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.916597 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerName="proxy-httpd" containerID="cri-o://09150e69b995ddb2d86b708dbf2807be2b8d4915c4a8d52d4df4f14e1430ef47" gracePeriod=30 Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.916709 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerName="sg-core" containerID="cri-o://5dc7e6e965f8163a120c6a2e67db178392a9c63937bfc8bb91a45883e8f14a0e" gracePeriod=30 Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.916438 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.916903 4644 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/ceilometer-0" podUID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerName="ceilometer-notification-agent" containerID="cri-o://13f94c785a0f578c0adcd5bed3faa201a8196bdaa2565387c561700a05b3f917" gracePeriod=30 Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.917702 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-fdgxf" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.918373 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-fdgxf" event={"ID":"bf873768-3def-4a7c-b48b-fb3749f8c927","Type":"ContainerDied","Data":"80761be2ae57769e5a9b5f1a83eb0f1a4b26a3a276e8fdd3d1e85bead962b705"} Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.918397 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80761be2ae57769e5a9b5f1a83eb0f1a4b26a3a276e8fdd3d1e85bead962b705" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.922929 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-dwfrr" event={"ID":"2f264a4d-92e3-4a69-a07f-00a3f0802484","Type":"ContainerStarted","Data":"8494c487ab0cf5ce802aa106153b692bb8e7ed1afb1731753dc2d7f6f2dc1c09"} Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.937313 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.860512932 podStartE2EDuration="42.937296054s" podCreationTimestamp="2025-12-13 07:01:36 +0000 UTC" firstStartedPulling="2025-12-13 07:01:38.058251195 +0000 UTC m=+960.273202028" lastFinishedPulling="2025-12-13 07:02:18.135034318 +0000 UTC m=+1000.349985150" observedRunningTime="2025-12-13 07:02:18.93467742 +0000 UTC m=+1001.149628253" watchObservedRunningTime="2025-12-13 07:02:18.937296054 +0000 UTC m=+1001.152246887" Dec 13 07:02:18 crc kubenswrapper[4644]: I1213 07:02:18.957531 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-dwfrr" podStartSLOduration=3.270728419 podStartE2EDuration="42.957515455s" podCreationTimestamp="2025-12-13 07:01:36 +0000 UTC" firstStartedPulling="2025-12-13 07:01:38.425984848 +0000 UTC m=+960.640935681" lastFinishedPulling="2025-12-13 07:02:18.112771884 +0000 UTC m=+1000.327722717" observedRunningTime="2025-12-13 07:02:18.951558434 +0000 UTC m=+1001.166509267" watchObservedRunningTime="2025-12-13 07:02:18.957515455 +0000 UTC m=+1001.172466288" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.405929 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 07:02:19 crc kubenswrapper[4644]: E1213 07:02:19.406265 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1a8f072-5116-43bc-9280-288e5eae7827" containerName="init" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.406284 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1a8f072-5116-43bc-9280-288e5eae7827" containerName="init" Dec 13 07:02:19 crc kubenswrapper[4644]: E1213 07:02:19.406298 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1a8f072-5116-43bc-9280-288e5eae7827" containerName="dnsmasq-dns" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.406304 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1a8f072-5116-43bc-9280-288e5eae7827" containerName="dnsmasq-dns" Dec 13 07:02:19 crc kubenswrapper[4644]: E1213 07:02:19.406320 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3526efa-a3f6-4ec1-8273-e302279281ba" 
containerName="registry-server" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.406326 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3526efa-a3f6-4ec1-8273-e302279281ba" containerName="registry-server" Dec 13 07:02:19 crc kubenswrapper[4644]: E1213 07:02:19.406334 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3526efa-a3f6-4ec1-8273-e302279281ba" containerName="extract-content" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.406340 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3526efa-a3f6-4ec1-8273-e302279281ba" containerName="extract-content" Dec 13 07:02:19 crc kubenswrapper[4644]: E1213 07:02:19.406354 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf873768-3def-4a7c-b48b-fb3749f8c927" containerName="cinder-db-sync" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.406361 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf873768-3def-4a7c-b48b-fb3749f8c927" containerName="cinder-db-sync" Dec 13 07:02:19 crc kubenswrapper[4644]: E1213 07:02:19.406371 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3526efa-a3f6-4ec1-8273-e302279281ba" containerName="extract-utilities" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.406376 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3526efa-a3f6-4ec1-8273-e302279281ba" containerName="extract-utilities" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.414550 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1a8f072-5116-43bc-9280-288e5eae7827" containerName="dnsmasq-dns" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.414588 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf873768-3def-4a7c-b48b-fb3749f8c927" containerName="cinder-db-sync" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.414624 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3526efa-a3f6-4ec1-8273-e302279281ba" containerName="registry-server" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.415634 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.420556 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-jrn9p" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.420860 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.421262 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.421490 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.432578 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.450195 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84bb9688cf-sbq2q"] Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.452034 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.475041 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bb9688cf-sbq2q"] Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.490718 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.490787 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4rhs\" (UniqueName: \"kubernetes.io/projected/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-kube-api-access-v4rhs\") pod \"cinder-scheduler-0\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.490826 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-config-data\") pod \"cinder-scheduler-0\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.490891 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.491081 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.491158 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-scripts\") pod \"cinder-scheduler-0\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.593261 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-dns-svc\") pod \"dnsmasq-dns-84bb9688cf-sbq2q\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.593542 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.593608 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-ovsdbserver-nb\") pod \"dnsmasq-dns-84bb9688cf-sbq2q\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.593643 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-scripts\") pod \"cinder-scheduler-0\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.593672 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-config\") pod \"dnsmasq-dns-84bb9688cf-sbq2q\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.593722 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.593740 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-ovsdbserver-sb\") pod \"dnsmasq-dns-84bb9688cf-sbq2q\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.593778 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d75j6\" (UniqueName: \"kubernetes.io/projected/e7de34db-31e9-4eb0-b53f-c50d1863609c-kube-api-access-d75j6\") pod \"dnsmasq-dns-84bb9688cf-sbq2q\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.593799 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4rhs\" (UniqueName: \"kubernetes.io/projected/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-kube-api-access-v4rhs\") pod \"cinder-scheduler-0\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.593831 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-config-data\") pod \"cinder-scheduler-0\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.593871 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.594717 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: 
\"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.596658 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.597948 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.602407 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.604383 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-config-data\") pod \"cinder-scheduler-0\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.604513 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.606822 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.618372 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-scripts\") pod \"cinder-scheduler-0\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.622854 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4rhs\" (UniqueName: \"kubernetes.io/projected/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-kube-api-access-v4rhs\") pod \"cinder-scheduler-0\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.624525 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.697995 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-config\") pod \"dnsmasq-dns-84bb9688cf-sbq2q\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.698079 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-ovsdbserver-sb\") pod \"dnsmasq-dns-84bb9688cf-sbq2q\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.698115 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d75j6\" (UniqueName: \"kubernetes.io/projected/e7de34db-31e9-4eb0-b53f-c50d1863609c-kube-api-access-d75j6\") pod 
\"dnsmasq-dns-84bb9688cf-sbq2q\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.698157 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-config-data\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.698192 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.698215 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.698304 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-config-data-custom\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.698328 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-dns-svc\") pod \"dnsmasq-dns-84bb9688cf-sbq2q\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.698402 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-scripts\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.698430 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-ovsdbserver-nb\") pod \"dnsmasq-dns-84bb9688cf-sbq2q\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.698473 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-logs\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.698494 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv7xb\" (UniqueName: \"kubernetes.io/projected/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-kube-api-access-dv7xb\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 
07:02:19.698918 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-ovsdbserver-sb\") pod \"dnsmasq-dns-84bb9688cf-sbq2q\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.699115 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-dns-svc\") pod \"dnsmasq-dns-84bb9688cf-sbq2q\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.699250 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-config\") pod \"dnsmasq-dns-84bb9688cf-sbq2q\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.699255 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-ovsdbserver-nb\") pod \"dnsmasq-dns-84bb9688cf-sbq2q\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.712372 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d75j6\" (UniqueName: \"kubernetes.io/projected/e7de34db-31e9-4eb0-b53f-c50d1863609c-kube-api-access-d75j6\") pod \"dnsmasq-dns-84bb9688cf-sbq2q\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.748651 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.776278 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.800145 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-scripts\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.800198 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-logs\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.800222 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv7xb\" (UniqueName: \"kubernetes.io/projected/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-kube-api-access-dv7xb\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.800301 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-config-data\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.800330 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.800352 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.800437 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-config-data-custom\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.800877 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-logs\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.800939 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.803996 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-scripts\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " 
pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.804172 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-config-data-custom\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.805927 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.807001 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-config-data\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.816585 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv7xb\" (UniqueName: \"kubernetes.io/projected/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-kube-api-access-dv7xb\") pod \"cinder-api-0\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " pod="openstack/cinder-api-0" Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.934039 4644 generic.go:334] "Generic (PLEG): container finished" podID="2f264a4d-92e3-4a69-a07f-00a3f0802484" containerID="8494c487ab0cf5ce802aa106153b692bb8e7ed1afb1731753dc2d7f6f2dc1c09" exitCode=0 Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.934109 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-dwfrr" event={"ID":"2f264a4d-92e3-4a69-a07f-00a3f0802484","Type":"ContainerDied","Data":"8494c487ab0cf5ce802aa106153b692bb8e7ed1afb1731753dc2d7f6f2dc1c09"} Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.941467 4644 generic.go:334] "Generic (PLEG): container finished" podID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerID="09150e69b995ddb2d86b708dbf2807be2b8d4915c4a8d52d4df4f14e1430ef47" exitCode=0 Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.941491 4644 generic.go:334] "Generic (PLEG): container finished" podID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerID="5dc7e6e965f8163a120c6a2e67db178392a9c63937bfc8bb91a45883e8f14a0e" exitCode=2 Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.941499 4644 generic.go:334] "Generic (PLEG): container finished" podID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerID="4c06db95a2033fa4cd71eb0c60b0ba4f56d460b9fb5c4a9bf9864f402e8bff41" exitCode=0 Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.941520 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c601fe2-d854-42ef-885a-e2acc45f1607","Type":"ContainerDied","Data":"09150e69b995ddb2d86b708dbf2807be2b8d4915c4a8d52d4df4f14e1430ef47"} Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.941543 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c601fe2-d854-42ef-885a-e2acc45f1607","Type":"ContainerDied","Data":"5dc7e6e965f8163a120c6a2e67db178392a9c63937bfc8bb91a45883e8f14a0e"} Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.941553 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"7c601fe2-d854-42ef-885a-e2acc45f1607","Type":"ContainerDied","Data":"4c06db95a2033fa4cd71eb0c60b0ba4f56d460b9fb5c4a9bf9864f402e8bff41"} Dec 13 07:02:19 crc kubenswrapper[4644]: I1213 07:02:19.983176 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 13 07:02:20 crc kubenswrapper[4644]: I1213 07:02:20.174019 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 07:02:20 crc kubenswrapper[4644]: I1213 07:02:20.259087 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bb9688cf-sbq2q"] Dec 13 07:02:20 crc kubenswrapper[4644]: W1213 07:02:20.260303 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7de34db_31e9_4eb0_b53f_c50d1863609c.slice/crio-bdb44cc31026ec984fb840c30cc1b866c2aa3fd3060d5f2baec2b1355669a8dd WatchSource:0}: Error finding container bdb44cc31026ec984fb840c30cc1b866c2aa3fd3060d5f2baec2b1355669a8dd: Status 404 returned error can't find the container with id bdb44cc31026ec984fb840c30cc1b866c2aa3fd3060d5f2baec2b1355669a8dd Dec 13 07:02:20 crc kubenswrapper[4644]: I1213 07:02:20.373311 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 13 07:02:20 crc kubenswrapper[4644]: W1213 07:02:20.374427 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d4c0c21_ad60_43dc_8b03_ac410b6de4e1.slice/crio-1ef5c58c8e6a6ffee1d7f3bfddd8e09457379ff91850ddfc9739a4d4529df82d WatchSource:0}: Error finding container 1ef5c58c8e6a6ffee1d7f3bfddd8e09457379ff91850ddfc9739a4d4529df82d: Status 404 returned error can't find the container with id 1ef5c58c8e6a6ffee1d7f3bfddd8e09457379ff91850ddfc9739a4d4529df82d Dec 13 07:02:20 crc kubenswrapper[4644]: I1213 07:02:20.952531 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1","Type":"ContainerStarted","Data":"f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322"} Dec 13 07:02:20 crc kubenswrapper[4644]: I1213 07:02:20.952846 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1","Type":"ContainerStarted","Data":"1ef5c58c8e6a6ffee1d7f3bfddd8e09457379ff91850ddfc9739a4d4529df82d"} Dec 13 07:02:20 crc kubenswrapper[4644]: I1213 07:02:20.955939 4644 generic.go:334] "Generic (PLEG): container finished" podID="e7de34db-31e9-4eb0-b53f-c50d1863609c" containerID="3e084abb964fe4a292300cfd93c7398fcb2180b597b48184d834429d0966272f" exitCode=0 Dec 13 07:02:20 crc kubenswrapper[4644]: I1213 07:02:20.955991 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" event={"ID":"e7de34db-31e9-4eb0-b53f-c50d1863609c","Type":"ContainerDied","Data":"3e084abb964fe4a292300cfd93c7398fcb2180b597b48184d834429d0966272f"} Dec 13 07:02:20 crc kubenswrapper[4644]: I1213 07:02:20.956043 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" event={"ID":"e7de34db-31e9-4eb0-b53f-c50d1863609c","Type":"ContainerStarted","Data":"bdb44cc31026ec984fb840c30cc1b866c2aa3fd3060d5f2baec2b1355669a8dd"} Dec 13 07:02:20 crc kubenswrapper[4644]: I1213 07:02:20.960222 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"ea39e752-3c5e-48d8-926e-ee8eefdacf8a","Type":"ContainerStarted","Data":"a3c3126c099e46942498fa9f796a5caafc8935f1f518afde599c31a0a0bd71e3"} Dec 13 07:02:21 crc kubenswrapper[4644]: I1213 07:02:21.298516 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-dwfrr" Dec 13 07:02:21 crc kubenswrapper[4644]: I1213 07:02:21.447165 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f264a4d-92e3-4a69-a07f-00a3f0802484-combined-ca-bundle\") pod \"2f264a4d-92e3-4a69-a07f-00a3f0802484\" (UID: \"2f264a4d-92e3-4a69-a07f-00a3f0802484\") " Dec 13 07:02:21 crc kubenswrapper[4644]: I1213 07:02:21.447532 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2f264a4d-92e3-4a69-a07f-00a3f0802484-db-sync-config-data\") pod \"2f264a4d-92e3-4a69-a07f-00a3f0802484\" (UID: \"2f264a4d-92e3-4a69-a07f-00a3f0802484\") " Dec 13 07:02:21 crc kubenswrapper[4644]: I1213 07:02:21.447643 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcl79\" (UniqueName: \"kubernetes.io/projected/2f264a4d-92e3-4a69-a07f-00a3f0802484-kube-api-access-xcl79\") pod \"2f264a4d-92e3-4a69-a07f-00a3f0802484\" (UID: \"2f264a4d-92e3-4a69-a07f-00a3f0802484\") " Dec 13 07:02:21 crc kubenswrapper[4644]: I1213 07:02:21.456618 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f264a4d-92e3-4a69-a07f-00a3f0802484-kube-api-access-xcl79" (OuterVolumeSpecName: "kube-api-access-xcl79") pod "2f264a4d-92e3-4a69-a07f-00a3f0802484" (UID: "2f264a4d-92e3-4a69-a07f-00a3f0802484"). InnerVolumeSpecName "kube-api-access-xcl79". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:21 crc kubenswrapper[4644]: I1213 07:02:21.473390 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f264a4d-92e3-4a69-a07f-00a3f0802484-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "2f264a4d-92e3-4a69-a07f-00a3f0802484" (UID: "2f264a4d-92e3-4a69-a07f-00a3f0802484"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:21 crc kubenswrapper[4644]: I1213 07:02:21.476168 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 13 07:02:21 crc kubenswrapper[4644]: I1213 07:02:21.489607 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f264a4d-92e3-4a69-a07f-00a3f0802484-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2f264a4d-92e3-4a69-a07f-00a3f0802484" (UID: "2f264a4d-92e3-4a69-a07f-00a3f0802484"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:21 crc kubenswrapper[4644]: I1213 07:02:21.556918 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f264a4d-92e3-4a69-a07f-00a3f0802484-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:21 crc kubenswrapper[4644]: I1213 07:02:21.556972 4644 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2f264a4d-92e3-4a69-a07f-00a3f0802484-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:21 crc kubenswrapper[4644]: I1213 07:02:21.556985 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcl79\" (UniqueName: \"kubernetes.io/projected/2f264a4d-92e3-4a69-a07f-00a3f0802484-kube-api-access-xcl79\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:21 crc kubenswrapper[4644]: I1213 07:02:21.942981 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.005773 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-dwfrr" event={"ID":"2f264a4d-92e3-4a69-a07f-00a3f0802484","Type":"ContainerDied","Data":"2027dfc1de6129f87011101b2690ed9b2ce4a9d966cc7ad4191cae28d3984feb"} Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.005820 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2027dfc1de6129f87011101b2690ed9b2ce4a9d966cc7ad4191cae28d3984feb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.005893 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-dwfrr" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.026818 4644 generic.go:334] "Generic (PLEG): container finished" podID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerID="13f94c785a0f578c0adcd5bed3faa201a8196bdaa2565387c561700a05b3f917" exitCode=0 Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.026884 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c601fe2-d854-42ef-885a-e2acc45f1607","Type":"ContainerDied","Data":"13f94c785a0f578c0adcd5bed3faa201a8196bdaa2565387c561700a05b3f917"} Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.026910 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c601fe2-d854-42ef-885a-e2acc45f1607","Type":"ContainerDied","Data":"af330df6fcc1b5120451e54749f718a1425d675b8e4e46fe73d601f3490a05b1"} Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.026927 4644 scope.go:117] "RemoveContainer" containerID="09150e69b995ddb2d86b708dbf2807be2b8d4915c4a8d52d4df4f14e1430ef47" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.027054 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.034152 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" event={"ID":"e7de34db-31e9-4eb0-b53f-c50d1863609c","Type":"ContainerStarted","Data":"051c391f56506aa2f2a25ce41843ab61398c3f4bd13f6b7012811686e757129e"} Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.035623 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.037396 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ea39e752-3c5e-48d8-926e-ee8eefdacf8a","Type":"ContainerStarted","Data":"2ea4eb5b4305902e28412848bc25f88b6f33dda8791b6e7b74f684e22a12d3b1"} Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.053745 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1","Type":"ContainerStarted","Data":"93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26"} Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.054648 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.066977 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-scripts\") pod \"7c601fe2-d854-42ef-885a-e2acc45f1607\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.067023 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sphwp\" (UniqueName: \"kubernetes.io/projected/7c601fe2-d854-42ef-885a-e2acc45f1607-kube-api-access-sphwp\") pod \"7c601fe2-d854-42ef-885a-e2acc45f1607\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.067054 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-combined-ca-bundle\") pod \"7c601fe2-d854-42ef-885a-e2acc45f1607\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.067083 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c601fe2-d854-42ef-885a-e2acc45f1607-run-httpd\") pod \"7c601fe2-d854-42ef-885a-e2acc45f1607\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.067175 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c601fe2-d854-42ef-885a-e2acc45f1607-log-httpd\") pod \"7c601fe2-d854-42ef-885a-e2acc45f1607\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.067233 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-sg-core-conf-yaml\") pod \"7c601fe2-d854-42ef-885a-e2acc45f1607\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.067343 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-config-data\") pod \"7c601fe2-d854-42ef-885a-e2acc45f1607\" (UID: \"7c601fe2-d854-42ef-885a-e2acc45f1607\") " Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.068696 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c601fe2-d854-42ef-885a-e2acc45f1607-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7c601fe2-d854-42ef-885a-e2acc45f1607" (UID: "7c601fe2-d854-42ef-885a-e2acc45f1607"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.070417 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c601fe2-d854-42ef-885a-e2acc45f1607-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7c601fe2-d854-42ef-885a-e2acc45f1607" (UID: "7c601fe2-d854-42ef-885a-e2acc45f1607"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.072392 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-scripts" (OuterVolumeSpecName: "scripts") pod "7c601fe2-d854-42ef-885a-e2acc45f1607" (UID: "7c601fe2-d854-42ef-885a-e2acc45f1607"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.079298 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c601fe2-d854-42ef-885a-e2acc45f1607-kube-api-access-sphwp" (OuterVolumeSpecName: "kube-api-access-sphwp") pod "7c601fe2-d854-42ef-885a-e2acc45f1607" (UID: "7c601fe2-d854-42ef-885a-e2acc45f1607"). InnerVolumeSpecName "kube-api-access-sphwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.105778 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" podStartSLOduration=3.105761569 podStartE2EDuration="3.105761569s" podCreationTimestamp="2025-12-13 07:02:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:02:22.096588951 +0000 UTC m=+1004.311539783" watchObservedRunningTime="2025-12-13 07:02:22.105761569 +0000 UTC m=+1004.320712402" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.115924 4644 scope.go:117] "RemoveContainer" containerID="5dc7e6e965f8163a120c6a2e67db178392a9c63937bfc8bb91a45883e8f14a0e" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.137730 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.137709906 podStartE2EDuration="3.137709906s" podCreationTimestamp="2025-12-13 07:02:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:02:22.121632404 +0000 UTC m=+1004.336583236" watchObservedRunningTime="2025-12-13 07:02:22.137709906 +0000 UTC m=+1004.352660739" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.147784 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7c601fe2-d854-42ef-885a-e2acc45f1607" (UID: "7c601fe2-d854-42ef-885a-e2acc45f1607"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.162834 4644 scope.go:117] "RemoveContainer" containerID="13f94c785a0f578c0adcd5bed3faa201a8196bdaa2565387c561700a05b3f917" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.170180 4644 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c601fe2-d854-42ef-885a-e2acc45f1607-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.170204 4644 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.170215 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.170225 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sphwp\" (UniqueName: \"kubernetes.io/projected/7c601fe2-d854-42ef-885a-e2acc45f1607-kube-api-access-sphwp\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.170233 4644 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c601fe2-d854-42ef-885a-e2acc45f1607-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.176925 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-combined-ca-bundle" (OuterVolumeSpecName: 
"combined-ca-bundle") pod "7c601fe2-d854-42ef-885a-e2acc45f1607" (UID: "7c601fe2-d854-42ef-885a-e2acc45f1607"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.185066 4644 scope.go:117] "RemoveContainer" containerID="4c06db95a2033fa4cd71eb0c60b0ba4f56d460b9fb5c4a9bf9864f402e8bff41" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.204527 4644 scope.go:117] "RemoveContainer" containerID="09150e69b995ddb2d86b708dbf2807be2b8d4915c4a8d52d4df4f14e1430ef47" Dec 13 07:02:22 crc kubenswrapper[4644]: E1213 07:02:22.207575 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09150e69b995ddb2d86b708dbf2807be2b8d4915c4a8d52d4df4f14e1430ef47\": container with ID starting with 09150e69b995ddb2d86b708dbf2807be2b8d4915c4a8d52d4df4f14e1430ef47 not found: ID does not exist" containerID="09150e69b995ddb2d86b708dbf2807be2b8d4915c4a8d52d4df4f14e1430ef47" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.207621 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09150e69b995ddb2d86b708dbf2807be2b8d4915c4a8d52d4df4f14e1430ef47"} err="failed to get container status \"09150e69b995ddb2d86b708dbf2807be2b8d4915c4a8d52d4df4f14e1430ef47\": rpc error: code = NotFound desc = could not find container \"09150e69b995ddb2d86b708dbf2807be2b8d4915c4a8d52d4df4f14e1430ef47\": container with ID starting with 09150e69b995ddb2d86b708dbf2807be2b8d4915c4a8d52d4df4f14e1430ef47 not found: ID does not exist" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.207654 4644 scope.go:117] "RemoveContainer" containerID="5dc7e6e965f8163a120c6a2e67db178392a9c63937bfc8bb91a45883e8f14a0e" Dec 13 07:02:22 crc kubenswrapper[4644]: E1213 07:02:22.208058 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5dc7e6e965f8163a120c6a2e67db178392a9c63937bfc8bb91a45883e8f14a0e\": container with ID starting with 5dc7e6e965f8163a120c6a2e67db178392a9c63937bfc8bb91a45883e8f14a0e not found: ID does not exist" containerID="5dc7e6e965f8163a120c6a2e67db178392a9c63937bfc8bb91a45883e8f14a0e" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.208095 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5dc7e6e965f8163a120c6a2e67db178392a9c63937bfc8bb91a45883e8f14a0e"} err="failed to get container status \"5dc7e6e965f8163a120c6a2e67db178392a9c63937bfc8bb91a45883e8f14a0e\": rpc error: code = NotFound desc = could not find container \"5dc7e6e965f8163a120c6a2e67db178392a9c63937bfc8bb91a45883e8f14a0e\": container with ID starting with 5dc7e6e965f8163a120c6a2e67db178392a9c63937bfc8bb91a45883e8f14a0e not found: ID does not exist" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.208120 4644 scope.go:117] "RemoveContainer" containerID="13f94c785a0f578c0adcd5bed3faa201a8196bdaa2565387c561700a05b3f917" Dec 13 07:02:22 crc kubenswrapper[4644]: E1213 07:02:22.208403 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13f94c785a0f578c0adcd5bed3faa201a8196bdaa2565387c561700a05b3f917\": container with ID starting with 13f94c785a0f578c0adcd5bed3faa201a8196bdaa2565387c561700a05b3f917 not found: ID does not exist" containerID="13f94c785a0f578c0adcd5bed3faa201a8196bdaa2565387c561700a05b3f917" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 
07:02:22.208430 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13f94c785a0f578c0adcd5bed3faa201a8196bdaa2565387c561700a05b3f917"} err="failed to get container status \"13f94c785a0f578c0adcd5bed3faa201a8196bdaa2565387c561700a05b3f917\": rpc error: code = NotFound desc = could not find container \"13f94c785a0f578c0adcd5bed3faa201a8196bdaa2565387c561700a05b3f917\": container with ID starting with 13f94c785a0f578c0adcd5bed3faa201a8196bdaa2565387c561700a05b3f917 not found: ID does not exist" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.208465 4644 scope.go:117] "RemoveContainer" containerID="4c06db95a2033fa4cd71eb0c60b0ba4f56d460b9fb5c4a9bf9864f402e8bff41" Dec 13 07:02:22 crc kubenswrapper[4644]: E1213 07:02:22.208860 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c06db95a2033fa4cd71eb0c60b0ba4f56d460b9fb5c4a9bf9864f402e8bff41\": container with ID starting with 4c06db95a2033fa4cd71eb0c60b0ba4f56d460b9fb5c4a9bf9864f402e8bff41 not found: ID does not exist" containerID="4c06db95a2033fa4cd71eb0c60b0ba4f56d460b9fb5c4a9bf9864f402e8bff41" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.209069 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c06db95a2033fa4cd71eb0c60b0ba4f56d460b9fb5c4a9bf9864f402e8bff41"} err="failed to get container status \"4c06db95a2033fa4cd71eb0c60b0ba4f56d460b9fb5c4a9bf9864f402e8bff41\": rpc error: code = NotFound desc = could not find container \"4c06db95a2033fa4cd71eb0c60b0ba4f56d460b9fb5c4a9bf9864f402e8bff41\": container with ID starting with 4c06db95a2033fa4cd71eb0c60b0ba4f56d460b9fb5c4a9bf9864f402e8bff41 not found: ID does not exist" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.211633 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-config-data" (OuterVolumeSpecName: "config-data") pod "7c601fe2-d854-42ef-885a-e2acc45f1607" (UID: "7c601fe2-d854-42ef-885a-e2acc45f1607"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.272520 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.272558 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c601fe2-d854-42ef-885a-e2acc45f1607-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.365987 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.372934 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.382560 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:02:22 crc kubenswrapper[4644]: E1213 07:02:22.382927 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerName="ceilometer-central-agent" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.382946 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerName="ceilometer-central-agent" Dec 13 07:02:22 crc kubenswrapper[4644]: E1213 07:02:22.382958 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerName="proxy-httpd" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.382965 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerName="proxy-httpd" Dec 13 07:02:22 crc kubenswrapper[4644]: E1213 07:02:22.382976 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f264a4d-92e3-4a69-a07f-00a3f0802484" containerName="barbican-db-sync" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.382981 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f264a4d-92e3-4a69-a07f-00a3f0802484" containerName="barbican-db-sync" Dec 13 07:02:22 crc kubenswrapper[4644]: E1213 07:02:22.383001 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerName="sg-core" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.383007 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerName="sg-core" Dec 13 07:02:22 crc kubenswrapper[4644]: E1213 07:02:22.383029 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerName="ceilometer-notification-agent" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.383035 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerName="ceilometer-notification-agent" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.383214 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f264a4d-92e3-4a69-a07f-00a3f0802484" containerName="barbican-db-sync" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.383223 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerName="ceilometer-central-agent" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.383235 4644 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerName="ceilometer-notification-agent" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.383247 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerName="sg-core" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.383256 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c601fe2-d854-42ef-885a-e2acc45f1607" containerName="proxy-httpd" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.388665 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.391409 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.391584 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.402472 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c601fe2-d854-42ef-885a-e2acc45f1607" path="/var/lib/kubelet/pods/7c601fe2-d854-42ef-885a-e2acc45f1607/volumes" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.403262 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.524342 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-56cc649647-pvdcb"] Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.525688 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-56cc649647-pvdcb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.528284 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-5td4f" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.528887 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.531016 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.549241 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-56cc649647-pvdcb"] Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.559502 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-8444d897db-7tb2t"] Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.560878 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.565398 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.576902 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-8444d897db-7tb2t"] Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.577416 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vprwr\" (UniqueName: \"kubernetes.io/projected/4c55472a-153d-4fd9-a16d-95a17cfc69c2-kube-api-access-vprwr\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.577498 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4c55472a-153d-4fd9-a16d-95a17cfc69c2-log-httpd\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.577536 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-config-data\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.577594 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.577657 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-scripts\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.577840 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4c55472a-153d-4fd9-a16d-95a17cfc69c2-run-httpd\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.577952 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.619508 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bb9688cf-sbq2q"] Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.638165 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-68c677b759-hjpvx"] Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.665177 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.681904 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2f4d27c-142a-48a0-bbc2-28b24c27f8e3-logs\") pod \"barbican-worker-56cc649647-pvdcb\" (UID: \"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3\") " pod="openstack/barbican-worker-56cc649647-pvdcb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.682001 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8c53ee2-0266-496a-858d-20f994ce474e-config-data-custom\") pod \"barbican-keystone-listener-8444d897db-7tb2t\" (UID: \"e8c53ee2-0266-496a-858d-20f994ce474e\") " pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.682045 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4c55472a-153d-4fd9-a16d-95a17cfc69c2-run-httpd\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.682086 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8c53ee2-0266-496a-858d-20f994ce474e-combined-ca-bundle\") pod \"barbican-keystone-listener-8444d897db-7tb2t\" (UID: \"e8c53ee2-0266-496a-858d-20f994ce474e\") " pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.682139 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2f4d27c-142a-48a0-bbc2-28b24c27f8e3-config-data\") pod \"barbican-worker-56cc649647-pvdcb\" (UID: \"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3\") " pod="openstack/barbican-worker-56cc649647-pvdcb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.682162 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnsst\" (UniqueName: \"kubernetes.io/projected/d2f4d27c-142a-48a0-bbc2-28b24c27f8e3-kube-api-access-qnsst\") pod \"barbican-worker-56cc649647-pvdcb\" (UID: \"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3\") " pod="openstack/barbican-worker-56cc649647-pvdcb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.682192 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.682213 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vprwr\" (UniqueName: \"kubernetes.io/projected/4c55472a-153d-4fd9-a16d-95a17cfc69c2-kube-api-access-vprwr\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.682247 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4c55472a-153d-4fd9-a16d-95a17cfc69c2-log-httpd\") pod \"ceilometer-0\" (UID: 
\"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.682279 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-config-data\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.682314 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8c53ee2-0266-496a-858d-20f994ce474e-logs\") pod \"barbican-keystone-listener-8444d897db-7tb2t\" (UID: \"e8c53ee2-0266-496a-858d-20f994ce474e\") " pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.682332 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d2f4d27c-142a-48a0-bbc2-28b24c27f8e3-config-data-custom\") pod \"barbican-worker-56cc649647-pvdcb\" (UID: \"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3\") " pod="openstack/barbican-worker-56cc649647-pvdcb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.682359 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.682384 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2f4d27c-142a-48a0-bbc2-28b24c27f8e3-combined-ca-bundle\") pod \"barbican-worker-56cc649647-pvdcb\" (UID: \"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3\") " pod="openstack/barbican-worker-56cc649647-pvdcb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.682434 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-scripts\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.682503 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrgv4\" (UniqueName: \"kubernetes.io/projected/e8c53ee2-0266-496a-858d-20f994ce474e-kube-api-access-zrgv4\") pod \"barbican-keystone-listener-8444d897db-7tb2t\" (UID: \"e8c53ee2-0266-496a-858d-20f994ce474e\") " pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.682525 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8c53ee2-0266-496a-858d-20f994ce474e-config-data\") pod \"barbican-keystone-listener-8444d897db-7tb2t\" (UID: \"e8c53ee2-0266-496a-858d-20f994ce474e\") " pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.682570 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4c55472a-153d-4fd9-a16d-95a17cfc69c2-run-httpd\") pod \"ceilometer-0\" (UID: 
\"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.683269 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4c55472a-153d-4fd9-a16d-95a17cfc69c2-log-httpd\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.684699 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68c677b759-hjpvx"] Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.686135 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-scripts\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.689235 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.689388 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-config-data\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.694160 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.703070 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vprwr\" (UniqueName: \"kubernetes.io/projected/4c55472a-153d-4fd9-a16d-95a17cfc69c2-kube-api-access-vprwr\") pod \"ceilometer-0\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") " pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.732147 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.754576 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5775886bd4-4n8bf"] Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.756021 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.760628 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.770585 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5775886bd4-4n8bf"] Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.783985 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d2f4d27c-142a-48a0-bbc2-28b24c27f8e3-config-data-custom\") pod \"barbican-worker-56cc649647-pvdcb\" (UID: \"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3\") " pod="openstack/barbican-worker-56cc649647-pvdcb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.784043 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8c53ee2-0266-496a-858d-20f994ce474e-logs\") pod \"barbican-keystone-listener-8444d897db-7tb2t\" (UID: \"e8c53ee2-0266-496a-858d-20f994ce474e\") " pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.784083 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-ovsdbserver-sb\") pod \"dnsmasq-dns-68c677b759-hjpvx\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") " pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.784105 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2f4d27c-142a-48a0-bbc2-28b24c27f8e3-combined-ca-bundle\") pod \"barbican-worker-56cc649647-pvdcb\" (UID: \"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3\") " pod="openstack/barbican-worker-56cc649647-pvdcb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.784123 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2pll\" (UniqueName: \"kubernetes.io/projected/69eb14f6-88c6-43e9-89e0-554944201a8e-kube-api-access-m2pll\") pod \"dnsmasq-dns-68c677b759-hjpvx\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") " pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.784146 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-ovsdbserver-nb\") pod \"dnsmasq-dns-68c677b759-hjpvx\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") " pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.784222 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrgv4\" (UniqueName: \"kubernetes.io/projected/e8c53ee2-0266-496a-858d-20f994ce474e-kube-api-access-zrgv4\") pod \"barbican-keystone-listener-8444d897db-7tb2t\" (UID: \"e8c53ee2-0266-496a-858d-20f994ce474e\") " pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.784244 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8c53ee2-0266-496a-858d-20f994ce474e-config-data\") pod 
\"barbican-keystone-listener-8444d897db-7tb2t\" (UID: \"e8c53ee2-0266-496a-858d-20f994ce474e\") " pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.784299 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2f4d27c-142a-48a0-bbc2-28b24c27f8e3-logs\") pod \"barbican-worker-56cc649647-pvdcb\" (UID: \"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3\") " pod="openstack/barbican-worker-56cc649647-pvdcb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.784357 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8c53ee2-0266-496a-858d-20f994ce474e-config-data-custom\") pod \"barbican-keystone-listener-8444d897db-7tb2t\" (UID: \"e8c53ee2-0266-496a-858d-20f994ce474e\") " pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.784397 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-dns-svc\") pod \"dnsmasq-dns-68c677b759-hjpvx\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") " pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.784417 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-config\") pod \"dnsmasq-dns-68c677b759-hjpvx\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") " pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.784438 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8c53ee2-0266-496a-858d-20f994ce474e-combined-ca-bundle\") pod \"barbican-keystone-listener-8444d897db-7tb2t\" (UID: \"e8c53ee2-0266-496a-858d-20f994ce474e\") " pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.784512 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2f4d27c-142a-48a0-bbc2-28b24c27f8e3-config-data\") pod \"barbican-worker-56cc649647-pvdcb\" (UID: \"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3\") " pod="openstack/barbican-worker-56cc649647-pvdcb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.784533 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnsst\" (UniqueName: \"kubernetes.io/projected/d2f4d27c-142a-48a0-bbc2-28b24c27f8e3-kube-api-access-qnsst\") pod \"barbican-worker-56cc649647-pvdcb\" (UID: \"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3\") " pod="openstack/barbican-worker-56cc649647-pvdcb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.786865 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2f4d27c-142a-48a0-bbc2-28b24c27f8e3-logs\") pod \"barbican-worker-56cc649647-pvdcb\" (UID: \"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3\") " pod="openstack/barbican-worker-56cc649647-pvdcb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.787566 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/e8c53ee2-0266-496a-858d-20f994ce474e-logs\") pod \"barbican-keystone-listener-8444d897db-7tb2t\" (UID: \"e8c53ee2-0266-496a-858d-20f994ce474e\") " pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.788311 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d2f4d27c-142a-48a0-bbc2-28b24c27f8e3-config-data-custom\") pod \"barbican-worker-56cc649647-pvdcb\" (UID: \"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3\") " pod="openstack/barbican-worker-56cc649647-pvdcb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.789650 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8c53ee2-0266-496a-858d-20f994ce474e-config-data\") pod \"barbican-keystone-listener-8444d897db-7tb2t\" (UID: \"e8c53ee2-0266-496a-858d-20f994ce474e\") " pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.791049 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8c53ee2-0266-496a-858d-20f994ce474e-combined-ca-bundle\") pod \"barbican-keystone-listener-8444d897db-7tb2t\" (UID: \"e8c53ee2-0266-496a-858d-20f994ce474e\") " pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.793077 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e8c53ee2-0266-496a-858d-20f994ce474e-config-data-custom\") pod \"barbican-keystone-listener-8444d897db-7tb2t\" (UID: \"e8c53ee2-0266-496a-858d-20f994ce474e\") " pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.799736 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2f4d27c-142a-48a0-bbc2-28b24c27f8e3-combined-ca-bundle\") pod \"barbican-worker-56cc649647-pvdcb\" (UID: \"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3\") " pod="openstack/barbican-worker-56cc649647-pvdcb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.802813 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnsst\" (UniqueName: \"kubernetes.io/projected/d2f4d27c-142a-48a0-bbc2-28b24c27f8e3-kube-api-access-qnsst\") pod \"barbican-worker-56cc649647-pvdcb\" (UID: \"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3\") " pod="openstack/barbican-worker-56cc649647-pvdcb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.802988 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2f4d27c-142a-48a0-bbc2-28b24c27f8e3-config-data\") pod \"barbican-worker-56cc649647-pvdcb\" (UID: \"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3\") " pod="openstack/barbican-worker-56cc649647-pvdcb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.804610 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrgv4\" (UniqueName: \"kubernetes.io/projected/e8c53ee2-0266-496a-858d-20f994ce474e-kube-api-access-zrgv4\") pod \"barbican-keystone-listener-8444d897db-7tb2t\" (UID: \"e8c53ee2-0266-496a-858d-20f994ce474e\") " pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.853658 4644 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/barbican-worker-56cc649647-pvdcb" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.885287 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.886559 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d67f22e-3af3-417b-9b53-7b1072cae514-logs\") pod \"barbican-api-5775886bd4-4n8bf\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.886609 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-ovsdbserver-sb\") pod \"dnsmasq-dns-68c677b759-hjpvx\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") " pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.886630 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2pll\" (UniqueName: \"kubernetes.io/projected/69eb14f6-88c6-43e9-89e0-554944201a8e-kube-api-access-m2pll\") pod \"dnsmasq-dns-68c677b759-hjpvx\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") " pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.886647 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-ovsdbserver-nb\") pod \"dnsmasq-dns-68c677b759-hjpvx\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") " pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.886686 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-config-data-custom\") pod \"barbican-api-5775886bd4-4n8bf\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.886734 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmdw9\" (UniqueName: \"kubernetes.io/projected/4d67f22e-3af3-417b-9b53-7b1072cae514-kube-api-access-tmdw9\") pod \"barbican-api-5775886bd4-4n8bf\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.886784 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-config-data\") pod \"barbican-api-5775886bd4-4n8bf\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.886813 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-dns-svc\") pod \"dnsmasq-dns-68c677b759-hjpvx\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") " pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.886834 4644 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-config\") pod \"dnsmasq-dns-68c677b759-hjpvx\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") " pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.886860 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-combined-ca-bundle\") pod \"barbican-api-5775886bd4-4n8bf\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.888069 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-ovsdbserver-nb\") pod \"dnsmasq-dns-68c677b759-hjpvx\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") " pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.888093 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-ovsdbserver-sb\") pod \"dnsmasq-dns-68c677b759-hjpvx\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") " pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.888685 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-config\") pod \"dnsmasq-dns-68c677b759-hjpvx\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") " pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.889072 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-dns-svc\") pod \"dnsmasq-dns-68c677b759-hjpvx\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") " pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.904332 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2pll\" (UniqueName: \"kubernetes.io/projected/69eb14f6-88c6-43e9-89e0-554944201a8e-kube-api-access-m2pll\") pod \"dnsmasq-dns-68c677b759-hjpvx\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") " pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.989128 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmdw9\" (UniqueName: \"kubernetes.io/projected/4d67f22e-3af3-417b-9b53-7b1072cae514-kube-api-access-tmdw9\") pod \"barbican-api-5775886bd4-4n8bf\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.989205 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-config-data\") pod \"barbican-api-5775886bd4-4n8bf\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.989262 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-combined-ca-bundle\") pod \"barbican-api-5775886bd4-4n8bf\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.989340 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d67f22e-3af3-417b-9b53-7b1072cae514-logs\") pod \"barbican-api-5775886bd4-4n8bf\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.989432 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-config-data-custom\") pod \"barbican-api-5775886bd4-4n8bf\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.990434 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d67f22e-3af3-417b-9b53-7b1072cae514-logs\") pod \"barbican-api-5775886bd4-4n8bf\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.994948 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-config-data\") pod \"barbican-api-5775886bd4-4n8bf\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.995907 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-combined-ca-bundle\") pod \"barbican-api-5775886bd4-4n8bf\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:22 crc kubenswrapper[4644]: I1213 07:02:22.996315 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-config-data-custom\") pod \"barbican-api-5775886bd4-4n8bf\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.006116 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmdw9\" (UniqueName: \"kubernetes.io/projected/4d67f22e-3af3-417b-9b53-7b1072cae514-kube-api-access-tmdw9\") pod \"barbican-api-5775886bd4-4n8bf\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.060556 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.080601 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ea39e752-3c5e-48d8-926e-ee8eefdacf8a","Type":"ContainerStarted","Data":"f3e24b391c47596529d605792336b95861eebe2ae6f9b4aff52abdae956bab37"} Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.086929 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" containerName="cinder-api-log" containerID="cri-o://f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322" gracePeriod=30 Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.087783 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" containerName="cinder-api" containerID="cri-o://93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26" gracePeriod=30 Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.108694 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.888490764 podStartE2EDuration="4.108670722s" podCreationTimestamp="2025-12-13 07:02:19 +0000 UTC" firstStartedPulling="2025-12-13 07:02:20.18097502 +0000 UTC m=+1002.395925843" lastFinishedPulling="2025-12-13 07:02:21.401154967 +0000 UTC m=+1003.616105801" observedRunningTime="2025-12-13 07:02:23.106517612 +0000 UTC m=+1005.321468446" watchObservedRunningTime="2025-12-13 07:02:23.108670722 +0000 UTC m=+1005.323621555" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.155092 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.177426 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.308497 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-56cc649647-pvdcb"] Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.396047 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-8444d897db-7tb2t"] Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.568068 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68c677b759-hjpvx"] Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.635430 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.671093 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5775886bd4-4n8bf"] Dec 13 07:02:23 crc kubenswrapper[4644]: W1213 07:02:23.694478 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d67f22e_3af3_417b_9b53_7b1072cae514.slice/crio-6b830f57f9ccfbd4d3fe3446ae281b66ece126b5746b630ef6135bf5602fb62b WatchSource:0}: Error finding container 6b830f57f9ccfbd4d3fe3446ae281b66ece126b5746b630ef6135bf5602fb62b: Status 404 returned error can't find the container with id 6b830f57f9ccfbd4d3fe3446ae281b66ece126b5746b630ef6135bf5602fb62b Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.805280 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-etc-machine-id\") pod \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.805502 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-config-data-custom\") pod \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.805380 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" (UID: "8d4c0c21-ad60-43dc-8b03-ac410b6de4e1"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.805584 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-logs\") pod \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.805846 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-config-data\") pod \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.806107 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-scripts\") pod \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.806213 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dv7xb\" (UniqueName: \"kubernetes.io/projected/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-kube-api-access-dv7xb\") pod \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.806318 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-combined-ca-bundle\") pod \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\" (UID: \"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1\") " Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.806062 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-logs" (OuterVolumeSpecName: "logs") pod "8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" (UID: "8d4c0c21-ad60-43dc-8b03-ac410b6de4e1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.807076 4644 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.807136 4644 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-logs\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.809028 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" (UID: "8d4c0c21-ad60-43dc-8b03-ac410b6de4e1"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.809481 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-scripts" (OuterVolumeSpecName: "scripts") pod "8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" (UID: "8d4c0c21-ad60-43dc-8b03-ac410b6de4e1"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.809573 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-kube-api-access-dv7xb" (OuterVolumeSpecName: "kube-api-access-dv7xb") pod "8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" (UID: "8d4c0c21-ad60-43dc-8b03-ac410b6de4e1"). InnerVolumeSpecName "kube-api-access-dv7xb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.837885 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" (UID: "8d4c0c21-ad60-43dc-8b03-ac410b6de4e1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.867918 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-config-data" (OuterVolumeSpecName: "config-data") pod "8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" (UID: "8d4c0c21-ad60-43dc-8b03-ac410b6de4e1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.908726 4644 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.908767 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.908781 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.908790 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dv7xb\" (UniqueName: \"kubernetes.io/projected/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-kube-api-access-dv7xb\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:23 crc kubenswrapper[4644]: I1213 07:02:23.908799 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.097159 4644 generic.go:334] "Generic (PLEG): container finished" podID="69eb14f6-88c6-43e9-89e0-554944201a8e" containerID="3c0e7457ae06ed80b08125bc23a5d70c1fba51131f9e5b65a0300256774e9ab9" exitCode=0 Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.097230 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68c677b759-hjpvx" event={"ID":"69eb14f6-88c6-43e9-89e0-554944201a8e","Type":"ContainerDied","Data":"3c0e7457ae06ed80b08125bc23a5d70c1fba51131f9e5b65a0300256774e9ab9"} Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.097422 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68c677b759-hjpvx" 
event={"ID":"69eb14f6-88c6-43e9-89e0-554944201a8e","Type":"ContainerStarted","Data":"0024bd96d4997e6dae36d74565a1f9a09f5b707bd20e789d68680f21206d7842"} Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.100528 4644 generic.go:334] "Generic (PLEG): container finished" podID="8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" containerID="93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26" exitCode=0 Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.100547 4644 generic.go:334] "Generic (PLEG): container finished" podID="8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" containerID="f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322" exitCode=143 Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.100579 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1","Type":"ContainerDied","Data":"93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26"} Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.100596 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1","Type":"ContainerDied","Data":"f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322"} Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.100607 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8d4c0c21-ad60-43dc-8b03-ac410b6de4e1","Type":"ContainerDied","Data":"1ef5c58c8e6a6ffee1d7f3bfddd8e09457379ff91850ddfc9739a4d4529df82d"} Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.100621 4644 scope.go:117] "RemoveContainer" containerID="93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.100719 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.109499 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4c55472a-153d-4fd9-a16d-95a17cfc69c2","Type":"ContainerStarted","Data":"836074acf177e0ef8c0e4c24d38d5c9ca89c33110cb94c672cae51417ae046d1"} Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.109527 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4c55472a-153d-4fd9-a16d-95a17cfc69c2","Type":"ContainerStarted","Data":"1573351e0323286765420c2707dec2a1c086b5d3b5fb7013c60f22200fcfcdaa"} Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.113491 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" event={"ID":"e8c53ee2-0266-496a-858d-20f994ce474e","Type":"ContainerStarted","Data":"19096c3e55e17c6e6474f863ed943f2355999d5039a0a17b01d3d1f0261328b7"} Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.117120 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5775886bd4-4n8bf" event={"ID":"4d67f22e-3af3-417b-9b53-7b1072cae514","Type":"ContainerStarted","Data":"e4efce6e4ea0e3ec75d9c5f9a594c9d9e0d1ab57eab7d469149d597c7df4142b"} Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.117164 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5775886bd4-4n8bf" event={"ID":"4d67f22e-3af3-417b-9b53-7b1072cae514","Type":"ContainerStarted","Data":"dbdd75a6091402c8469cff64cefa24d9f2d8c8d8301e08b8f0af5d9bdce2f9b5"} Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.117175 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5775886bd4-4n8bf" event={"ID":"4d67f22e-3af3-417b-9b53-7b1072cae514","Type":"ContainerStarted","Data":"6b830f57f9ccfbd4d3fe3446ae281b66ece126b5746b630ef6135bf5602fb62b"} Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.117525 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.117627 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.120209 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-56cc649647-pvdcb" event={"ID":"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3","Type":"ContainerStarted","Data":"8124bdb21bf7750b59225709041981a3cf904a4d20c94b93e30bef5d7b0c61ff"} Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.120413 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" podUID="e7de34db-31e9-4eb0-b53f-c50d1863609c" containerName="dnsmasq-dns" containerID="cri-o://051c391f56506aa2f2a25ce41843ab61398c3f4bd13f6b7012811686e757129e" gracePeriod=10 Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.125795 4644 scope.go:117] "RemoveContainer" containerID="f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.154388 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5775886bd4-4n8bf" podStartSLOduration=2.154369808 podStartE2EDuration="2.154369808s" podCreationTimestamp="2025-12-13 07:02:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-13 07:02:24.145163476 +0000 UTC m=+1006.360114309" watchObservedRunningTime="2025-12-13 07:02:24.154369808 +0000 UTC m=+1006.369320640" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.157681 4644 scope.go:117] "RemoveContainer" containerID="93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26" Dec 13 07:02:24 crc kubenswrapper[4644]: E1213 07:02:24.161123 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26\": container with ID starting with 93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26 not found: ID does not exist" containerID="93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.161162 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26"} err="failed to get container status \"93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26\": rpc error: code = NotFound desc = could not find container \"93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26\": container with ID starting with 93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26 not found: ID does not exist" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.161188 4644 scope.go:117] "RemoveContainer" containerID="f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322" Dec 13 07:02:24 crc kubenswrapper[4644]: E1213 07:02:24.161927 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322\": container with ID starting with f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322 not found: ID does not exist" containerID="f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.161955 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322"} err="failed to get container status \"f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322\": rpc error: code = NotFound desc = could not find container \"f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322\": container with ID starting with f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322 not found: ID does not exist" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.161973 4644 scope.go:117] "RemoveContainer" containerID="93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.162242 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26"} err="failed to get container status \"93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26\": rpc error: code = NotFound desc = could not find container \"93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26\": container with ID starting with 93d59c40d97401129fef5095c6fbcfe65ede6ec7b7e90ddcbb7f471b9fa2bc26 not found: ID does not exist" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.162259 4644 scope.go:117] "RemoveContainer" 
containerID="f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.162563 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322"} err="failed to get container status \"f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322\": rpc error: code = NotFound desc = could not find container \"f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322\": container with ID starting with f84aebbe90b904459e9f9f51ab58317eaede8080e9c802cc0cfe864f4aac5322 not found: ID does not exist" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.167051 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.172300 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.185000 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 13 07:02:24 crc kubenswrapper[4644]: E1213 07:02:24.185359 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" containerName="cinder-api-log" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.185374 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" containerName="cinder-api-log" Dec 13 07:02:24 crc kubenswrapper[4644]: E1213 07:02:24.185418 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" containerName="cinder-api" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.185423 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" containerName="cinder-api" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.192732 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" containerName="cinder-api-log" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.192796 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" containerName="cinder-api" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.193840 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.201283 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.201408 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.201707 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.213582 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.320423 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-config-data-custom\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.320808 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.320916 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnq7v\" (UniqueName: \"kubernetes.io/projected/a21196d3-f0e1-419a-9180-e57eaa042592-kube-api-access-vnq7v\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.320955 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.321033 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a21196d3-f0e1-419a-9180-e57eaa042592-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.321227 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a21196d3-f0e1-419a-9180-e57eaa042592-logs\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.321343 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-scripts\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.321437 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.321618 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-config-data\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.404012 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d4c0c21-ad60-43dc-8b03-ac410b6de4e1" path="/var/lib/kubelet/pods/8d4c0c21-ad60-43dc-8b03-ac410b6de4e1/volumes" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.425618 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a21196d3-f0e1-419a-9180-e57eaa042592-logs\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.428087 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a21196d3-f0e1-419a-9180-e57eaa042592-logs\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.425661 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-scripts\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.428616 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.428701 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-config-data\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.428800 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-config-data-custom\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.428919 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.428971 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnq7v\" (UniqueName: \"kubernetes.io/projected/a21196d3-f0e1-419a-9180-e57eaa042592-kube-api-access-vnq7v\") pod \"cinder-api-0\" (UID: 
\"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.428993 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.429141 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a21196d3-f0e1-419a-9180-e57eaa042592-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.429251 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a21196d3-f0e1-419a-9180-e57eaa042592-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.434303 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.437039 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.438256 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-config-data\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.440618 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.445342 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-scripts\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.447401 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnq7v\" (UniqueName: \"kubernetes.io/projected/a21196d3-f0e1-419a-9180-e57eaa042592-kube-api-access-vnq7v\") pod \"cinder-api-0\" (UID: \"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.456146 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a21196d3-f0e1-419a-9180-e57eaa042592-config-data-custom\") pod \"cinder-api-0\" (UID: 
\"a21196d3-f0e1-419a-9180-e57eaa042592\") " pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.530582 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.585862 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.735844 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-ovsdbserver-sb\") pod \"e7de34db-31e9-4eb0-b53f-c50d1863609c\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.736117 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-config\") pod \"e7de34db-31e9-4eb0-b53f-c50d1863609c\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.736147 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d75j6\" (UniqueName: \"kubernetes.io/projected/e7de34db-31e9-4eb0-b53f-c50d1863609c-kube-api-access-d75j6\") pod \"e7de34db-31e9-4eb0-b53f-c50d1863609c\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.736176 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-ovsdbserver-nb\") pod \"e7de34db-31e9-4eb0-b53f-c50d1863609c\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.736196 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-dns-svc\") pod \"e7de34db-31e9-4eb0-b53f-c50d1863609c\" (UID: \"e7de34db-31e9-4eb0-b53f-c50d1863609c\") " Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.744214 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7de34db-31e9-4eb0-b53f-c50d1863609c-kube-api-access-d75j6" (OuterVolumeSpecName: "kube-api-access-d75j6") pod "e7de34db-31e9-4eb0-b53f-c50d1863609c" (UID: "e7de34db-31e9-4eb0-b53f-c50d1863609c"). InnerVolumeSpecName "kube-api-access-d75j6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.749091 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.783048 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-config" (OuterVolumeSpecName: "config") pod "e7de34db-31e9-4eb0-b53f-c50d1863609c" (UID: "e7de34db-31e9-4eb0-b53f-c50d1863609c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.789904 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e7de34db-31e9-4eb0-b53f-c50d1863609c" (UID: "e7de34db-31e9-4eb0-b53f-c50d1863609c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.796235 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e7de34db-31e9-4eb0-b53f-c50d1863609c" (UID: "e7de34db-31e9-4eb0-b53f-c50d1863609c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.797650 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e7de34db-31e9-4eb0-b53f-c50d1863609c" (UID: "e7de34db-31e9-4eb0-b53f-c50d1863609c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.839339 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.839385 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d75j6\" (UniqueName: \"kubernetes.io/projected/e7de34db-31e9-4eb0-b53f-c50d1863609c-kube-api-access-d75j6\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.839402 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.839415 4644 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.839425 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e7de34db-31e9-4eb0-b53f-c50d1863609c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:24 crc kubenswrapper[4644]: I1213 07:02:24.998850 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.026007 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2jp6k" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.067746 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2jp6k"] Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.127572 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4c55472a-153d-4fd9-a16d-95a17cfc69c2","Type":"ContainerStarted","Data":"7382fb24bae47ca01373fef32f5c42429ed0190de6ed32685f977088b223308c"} Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 
07:02:25.129801 4644 generic.go:334] "Generic (PLEG): container finished" podID="e7de34db-31e9-4eb0-b53f-c50d1863609c" containerID="051c391f56506aa2f2a25ce41843ab61398c3f4bd13f6b7012811686e757129e" exitCode=0 Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.129839 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" event={"ID":"e7de34db-31e9-4eb0-b53f-c50d1863609c","Type":"ContainerDied","Data":"051c391f56506aa2f2a25ce41843ab61398c3f4bd13f6b7012811686e757129e"} Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.129854 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" event={"ID":"e7de34db-31e9-4eb0-b53f-c50d1863609c","Type":"ContainerDied","Data":"bdb44cc31026ec984fb840c30cc1b866c2aa3fd3060d5f2baec2b1355669a8dd"} Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.129872 4644 scope.go:117] "RemoveContainer" containerID="051c391f56506aa2f2a25ce41843ab61398c3f4bd13f6b7012811686e757129e" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.129969 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9688cf-sbq2q" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.135415 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68c677b759-hjpvx" event={"ID":"69eb14f6-88c6-43e9-89e0-554944201a8e","Type":"ContainerStarted","Data":"762cf4d78861a7f6a00567d2e978d8882a7352585c0759e2dbadb5783e5a8e49"} Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.135484 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.137676 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2jp6k" podUID="88fd6885-187f-443b-a294-88293678f36b" containerName="registry-server" containerID="cri-o://7ef975efa5375b23cfde12b1affb04e48f0398ce3c0ed23ae7519218b7b7ff8f" gracePeriod=2 Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.155240 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-68c677b759-hjpvx" podStartSLOduration=3.155225228 podStartE2EDuration="3.155225228s" podCreationTimestamp="2025-12-13 07:02:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:02:25.152785009 +0000 UTC m=+1007.367735842" watchObservedRunningTime="2025-12-13 07:02:25.155225228 +0000 UTC m=+1007.370176061" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.180402 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bb9688cf-sbq2q"] Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.186745 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84bb9688cf-sbq2q"] Dec 13 07:02:25 crc kubenswrapper[4644]: W1213 07:02:25.279561 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda21196d3_f0e1_419a_9180_e57eaa042592.slice/crio-4347f647afd010e3a755540b77efe07bdd18f14cb6fdbcaf348b14209ed4fe95 WatchSource:0}: Error finding container 4347f647afd010e3a755540b77efe07bdd18f14cb6fdbcaf348b14209ed4fe95: Status 404 returned error can't find the container with id 4347f647afd010e3a755540b77efe07bdd18f14cb6fdbcaf348b14209ed4fe95 Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 
07:02:25.300862 4644 scope.go:117] "RemoveContainer" containerID="3e084abb964fe4a292300cfd93c7398fcb2180b597b48184d834429d0966272f" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.343055 4644 scope.go:117] "RemoveContainer" containerID="051c391f56506aa2f2a25ce41843ab61398c3f4bd13f6b7012811686e757129e" Dec 13 07:02:25 crc kubenswrapper[4644]: E1213 07:02:25.343553 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"051c391f56506aa2f2a25ce41843ab61398c3f4bd13f6b7012811686e757129e\": container with ID starting with 051c391f56506aa2f2a25ce41843ab61398c3f4bd13f6b7012811686e757129e not found: ID does not exist" containerID="051c391f56506aa2f2a25ce41843ab61398c3f4bd13f6b7012811686e757129e" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.343595 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"051c391f56506aa2f2a25ce41843ab61398c3f4bd13f6b7012811686e757129e"} err="failed to get container status \"051c391f56506aa2f2a25ce41843ab61398c3f4bd13f6b7012811686e757129e\": rpc error: code = NotFound desc = could not find container \"051c391f56506aa2f2a25ce41843ab61398c3f4bd13f6b7012811686e757129e\": container with ID starting with 051c391f56506aa2f2a25ce41843ab61398c3f4bd13f6b7012811686e757129e not found: ID does not exist" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.343618 4644 scope.go:117] "RemoveContainer" containerID="3e084abb964fe4a292300cfd93c7398fcb2180b597b48184d834429d0966272f" Dec 13 07:02:25 crc kubenswrapper[4644]: E1213 07:02:25.343977 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e084abb964fe4a292300cfd93c7398fcb2180b597b48184d834429d0966272f\": container with ID starting with 3e084abb964fe4a292300cfd93c7398fcb2180b597b48184d834429d0966272f not found: ID does not exist" containerID="3e084abb964fe4a292300cfd93c7398fcb2180b597b48184d834429d0966272f" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.344007 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e084abb964fe4a292300cfd93c7398fcb2180b597b48184d834429d0966272f"} err="failed to get container status \"3e084abb964fe4a292300cfd93c7398fcb2180b597b48184d834429d0966272f\": rpc error: code = NotFound desc = could not find container \"3e084abb964fe4a292300cfd93c7398fcb2180b597b48184d834429d0966272f\": container with ID starting with 3e084abb964fe4a292300cfd93c7398fcb2180b597b48184d834429d0966272f not found: ID does not exist" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.549506 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6cb6c55fb8-zbmvj"] Dec 13 07:02:25 crc kubenswrapper[4644]: E1213 07:02:25.550127 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7de34db-31e9-4eb0-b53f-c50d1863609c" containerName="init" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.550232 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7de34db-31e9-4eb0-b53f-c50d1863609c" containerName="init" Dec 13 07:02:25 crc kubenswrapper[4644]: E1213 07:02:25.550263 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7de34db-31e9-4eb0-b53f-c50d1863609c" containerName="dnsmasq-dns" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.550271 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7de34db-31e9-4eb0-b53f-c50d1863609c" containerName="dnsmasq-dns" Dec 13 07:02:25 crc 
kubenswrapper[4644]: I1213 07:02:25.555460 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7de34db-31e9-4eb0-b53f-c50d1863609c" containerName="dnsmasq-dns" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.556451 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.556460 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6cb6c55fb8-zbmvj"] Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.558873 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.559578 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.597695 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2jp6k" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.653728 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2806b62c-cd1d-4e9f-97be-70e9129ce932-config-data\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.653800 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2806b62c-cd1d-4e9f-97be-70e9129ce932-logs\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.653897 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2806b62c-cd1d-4e9f-97be-70e9129ce932-public-tls-certs\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.653976 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2806b62c-cd1d-4e9f-97be-70e9129ce932-internal-tls-certs\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.654019 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2806b62c-cd1d-4e9f-97be-70e9129ce932-combined-ca-bundle\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.654309 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2806b62c-cd1d-4e9f-97be-70e9129ce932-config-data-custom\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc 
kubenswrapper[4644]: I1213 07:02:25.654421 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp2pg\" (UniqueName: \"kubernetes.io/projected/2806b62c-cd1d-4e9f-97be-70e9129ce932-kube-api-access-wp2pg\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.755639 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88fd6885-187f-443b-a294-88293678f36b-catalog-content\") pod \"88fd6885-187f-443b-a294-88293678f36b\" (UID: \"88fd6885-187f-443b-a294-88293678f36b\") " Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.755710 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88fd6885-187f-443b-a294-88293678f36b-utilities\") pod \"88fd6885-187f-443b-a294-88293678f36b\" (UID: \"88fd6885-187f-443b-a294-88293678f36b\") " Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.755819 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c48sl\" (UniqueName: \"kubernetes.io/projected/88fd6885-187f-443b-a294-88293678f36b-kube-api-access-c48sl\") pod \"88fd6885-187f-443b-a294-88293678f36b\" (UID: \"88fd6885-187f-443b-a294-88293678f36b\") " Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.756107 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2806b62c-cd1d-4e9f-97be-70e9129ce932-internal-tls-certs\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.756140 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2806b62c-cd1d-4e9f-97be-70e9129ce932-combined-ca-bundle\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.756240 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2806b62c-cd1d-4e9f-97be-70e9129ce932-config-data-custom\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.756284 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp2pg\" (UniqueName: \"kubernetes.io/projected/2806b62c-cd1d-4e9f-97be-70e9129ce932-kube-api-access-wp2pg\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.756313 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2806b62c-cd1d-4e9f-97be-70e9129ce932-config-data\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.756914 4644 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88fd6885-187f-443b-a294-88293678f36b-utilities" (OuterVolumeSpecName: "utilities") pod "88fd6885-187f-443b-a294-88293678f36b" (UID: "88fd6885-187f-443b-a294-88293678f36b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.757286 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2806b62c-cd1d-4e9f-97be-70e9129ce932-logs\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.757612 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2806b62c-cd1d-4e9f-97be-70e9129ce932-logs\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.757687 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2806b62c-cd1d-4e9f-97be-70e9129ce932-public-tls-certs\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.757772 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88fd6885-187f-443b-a294-88293678f36b-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.759949 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88fd6885-187f-443b-a294-88293678f36b-kube-api-access-c48sl" (OuterVolumeSpecName: "kube-api-access-c48sl") pod "88fd6885-187f-443b-a294-88293678f36b" (UID: "88fd6885-187f-443b-a294-88293678f36b"). InnerVolumeSpecName "kube-api-access-c48sl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.764971 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2806b62c-cd1d-4e9f-97be-70e9129ce932-public-tls-certs\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.764996 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2806b62c-cd1d-4e9f-97be-70e9129ce932-internal-tls-certs\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.765011 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2806b62c-cd1d-4e9f-97be-70e9129ce932-combined-ca-bundle\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.769799 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wp2pg\" (UniqueName: \"kubernetes.io/projected/2806b62c-cd1d-4e9f-97be-70e9129ce932-kube-api-access-wp2pg\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.769817 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2806b62c-cd1d-4e9f-97be-70e9129ce932-config-data\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.770236 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2806b62c-cd1d-4e9f-97be-70e9129ce932-config-data-custom\") pod \"barbican-api-6cb6c55fb8-zbmvj\" (UID: \"2806b62c-cd1d-4e9f-97be-70e9129ce932\") " pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.797781 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88fd6885-187f-443b-a294-88293678f36b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "88fd6885-187f-443b-a294-88293678f36b" (UID: "88fd6885-187f-443b-a294-88293678f36b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.859207 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c48sl\" (UniqueName: \"kubernetes.io/projected/88fd6885-187f-443b-a294-88293678f36b-kube-api-access-c48sl\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.859234 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88fd6885-187f-443b-a294-88293678f36b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:25 crc kubenswrapper[4644]: I1213 07:02:25.888540 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.161010 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" event={"ID":"e8c53ee2-0266-496a-858d-20f994ce474e","Type":"ContainerStarted","Data":"dc60130ee2f94fb038f07b518183a9c39618d4d0b8219237b9d6011d3b50ed17"} Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.161222 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" event={"ID":"e8c53ee2-0266-496a-858d-20f994ce474e","Type":"ContainerStarted","Data":"98a1c7a4c0dcf542b069700b8cf6fa21b4a37fdbef98a02f38504424a1b2af9a"} Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.168698 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-56cc649647-pvdcb" event={"ID":"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3","Type":"ContainerStarted","Data":"1f91d849312690681621375d109a0183c3996bcea61abaa1a992cae0f8c5f11a"} Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.168732 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-56cc649647-pvdcb" event={"ID":"d2f4d27c-142a-48a0-bbc2-28b24c27f8e3","Type":"ContainerStarted","Data":"9913a0d9046ab816c9123d7733882bb23d10d7bcf5dad2ed15b7d974c8d099a4"} Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.171093 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a21196d3-f0e1-419a-9180-e57eaa042592","Type":"ContainerStarted","Data":"9bf7e99a7fdac8d816b0ccbc7b2f5c036183fbde416a46af02d4284982d90e21"} Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.171143 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a21196d3-f0e1-419a-9180-e57eaa042592","Type":"ContainerStarted","Data":"4347f647afd010e3a755540b77efe07bdd18f14cb6fdbcaf348b14209ed4fe95"} Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.177675 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4c55472a-153d-4fd9-a16d-95a17cfc69c2","Type":"ContainerStarted","Data":"e8f1b91106029dbe15d2e5eb6edb9e115d90127c1dd4aca942cc26886281019f"} Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.178734 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-8444d897db-7tb2t" podStartSLOduration=2.224417239 podStartE2EDuration="4.178719419s" podCreationTimestamp="2025-12-13 07:02:22 +0000 UTC" firstStartedPulling="2025-12-13 07:02:23.404976908 +0000 UTC m=+1005.619927742" lastFinishedPulling="2025-12-13 07:02:25.35927909 +0000 UTC m=+1007.574229922" observedRunningTime="2025-12-13 07:02:26.174971331 +0000 UTC m=+1008.389922164" watchObservedRunningTime="2025-12-13 07:02:26.178719419 +0000 UTC m=+1008.393670252" Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.196921 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-56cc649647-pvdcb" podStartSLOduration=2.233758282 podStartE2EDuration="4.196898493s" podCreationTimestamp="2025-12-13 07:02:22 +0000 UTC" firstStartedPulling="2025-12-13 07:02:23.343985176 +0000 UTC m=+1005.558936010" lastFinishedPulling="2025-12-13 07:02:25.307125388 +0000 UTC m=+1007.522076221" observedRunningTime="2025-12-13 07:02:26.192396258 +0000 UTC m=+1008.407347091" watchObservedRunningTime="2025-12-13 07:02:26.196898493 +0000 UTC m=+1008.411849327" Dec 13 07:02:26 
crc kubenswrapper[4644]: I1213 07:02:26.202921 4644 generic.go:334] "Generic (PLEG): container finished" podID="88fd6885-187f-443b-a294-88293678f36b" containerID="7ef975efa5375b23cfde12b1affb04e48f0398ce3c0ed23ae7519218b7b7ff8f" exitCode=0 Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.203980 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2jp6k" Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.207740 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2jp6k" event={"ID":"88fd6885-187f-443b-a294-88293678f36b","Type":"ContainerDied","Data":"7ef975efa5375b23cfde12b1affb04e48f0398ce3c0ed23ae7519218b7b7ff8f"} Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.207885 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2jp6k" event={"ID":"88fd6885-187f-443b-a294-88293678f36b","Type":"ContainerDied","Data":"9c6a7ad9067a2837bf5d06964ff57a419c9c9ea3f437b002358f3c1d2d8b81f1"} Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.207908 4644 scope.go:117] "RemoveContainer" containerID="7ef975efa5375b23cfde12b1affb04e48f0398ce3c0ed23ae7519218b7b7ff8f" Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.250367 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2jp6k"] Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.258329 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2jp6k"] Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.327165 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6cb6c55fb8-zbmvj"] Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.424978 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88fd6885-187f-443b-a294-88293678f36b" path="/var/lib/kubelet/pods/88fd6885-187f-443b-a294-88293678f36b/volumes" Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.425874 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7de34db-31e9-4eb0-b53f-c50d1863609c" path="/var/lib/kubelet/pods/e7de34db-31e9-4eb0-b53f-c50d1863609c/volumes" Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.753427 4644 scope.go:117] "RemoveContainer" containerID="07fcd86043563bb5f1032190351410a21b57b98f2182e73522607e2ab9568609" Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.801728 4644 scope.go:117] "RemoveContainer" containerID="6b8f090c975bd3dc55ef3f3f5bd006743d60d3875d724f3cde0777c271ee5826" Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.876660 4644 scope.go:117] "RemoveContainer" containerID="7ef975efa5375b23cfde12b1affb04e48f0398ce3c0ed23ae7519218b7b7ff8f" Dec 13 07:02:26 crc kubenswrapper[4644]: E1213 07:02:26.877040 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ef975efa5375b23cfde12b1affb04e48f0398ce3c0ed23ae7519218b7b7ff8f\": container with ID starting with 7ef975efa5375b23cfde12b1affb04e48f0398ce3c0ed23ae7519218b7b7ff8f not found: ID does not exist" containerID="7ef975efa5375b23cfde12b1affb04e48f0398ce3c0ed23ae7519218b7b7ff8f" Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.877070 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ef975efa5375b23cfde12b1affb04e48f0398ce3c0ed23ae7519218b7b7ff8f"} err="failed to get container status 
\"7ef975efa5375b23cfde12b1affb04e48f0398ce3c0ed23ae7519218b7b7ff8f\": rpc error: code = NotFound desc = could not find container \"7ef975efa5375b23cfde12b1affb04e48f0398ce3c0ed23ae7519218b7b7ff8f\": container with ID starting with 7ef975efa5375b23cfde12b1affb04e48f0398ce3c0ed23ae7519218b7b7ff8f not found: ID does not exist" Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.877092 4644 scope.go:117] "RemoveContainer" containerID="07fcd86043563bb5f1032190351410a21b57b98f2182e73522607e2ab9568609" Dec 13 07:02:26 crc kubenswrapper[4644]: E1213 07:02:26.877320 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07fcd86043563bb5f1032190351410a21b57b98f2182e73522607e2ab9568609\": container with ID starting with 07fcd86043563bb5f1032190351410a21b57b98f2182e73522607e2ab9568609 not found: ID does not exist" containerID="07fcd86043563bb5f1032190351410a21b57b98f2182e73522607e2ab9568609" Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.877342 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07fcd86043563bb5f1032190351410a21b57b98f2182e73522607e2ab9568609"} err="failed to get container status \"07fcd86043563bb5f1032190351410a21b57b98f2182e73522607e2ab9568609\": rpc error: code = NotFound desc = could not find container \"07fcd86043563bb5f1032190351410a21b57b98f2182e73522607e2ab9568609\": container with ID starting with 07fcd86043563bb5f1032190351410a21b57b98f2182e73522607e2ab9568609 not found: ID does not exist" Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.877357 4644 scope.go:117] "RemoveContainer" containerID="6b8f090c975bd3dc55ef3f3f5bd006743d60d3875d724f3cde0777c271ee5826" Dec 13 07:02:26 crc kubenswrapper[4644]: E1213 07:02:26.879357 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b8f090c975bd3dc55ef3f3f5bd006743d60d3875d724f3cde0777c271ee5826\": container with ID starting with 6b8f090c975bd3dc55ef3f3f5bd006743d60d3875d724f3cde0777c271ee5826 not found: ID does not exist" containerID="6b8f090c975bd3dc55ef3f3f5bd006743d60d3875d724f3cde0777c271ee5826" Dec 13 07:02:26 crc kubenswrapper[4644]: I1213 07:02:26.879387 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b8f090c975bd3dc55ef3f3f5bd006743d60d3875d724f3cde0777c271ee5826"} err="failed to get container status \"6b8f090c975bd3dc55ef3f3f5bd006743d60d3875d724f3cde0777c271ee5826\": rpc error: code = NotFound desc = could not find container \"6b8f090c975bd3dc55ef3f3f5bd006743d60d3875d724f3cde0777c271ee5826\": container with ID starting with 6b8f090c975bd3dc55ef3f3f5bd006743d60d3875d724f3cde0777c271ee5826 not found: ID does not exist" Dec 13 07:02:27 crc kubenswrapper[4644]: I1213 07:02:27.212410 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a21196d3-f0e1-419a-9180-e57eaa042592","Type":"ContainerStarted","Data":"1750f4b21dcac0bd7f2af26afc43ff8a8c3ea6657fee8823b18834890e1eb355"} Dec 13 07:02:27 crc kubenswrapper[4644]: I1213 07:02:27.213136 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 13 07:02:27 crc kubenswrapper[4644]: I1213 07:02:27.214862 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6cb6c55fb8-zbmvj" 
event={"ID":"2806b62c-cd1d-4e9f-97be-70e9129ce932","Type":"ContainerStarted","Data":"f5161286ff8d9bbfa6b86f437841a4a935165b41ad8d854a819d9b9fe1d07c2c"} Dec 13 07:02:27 crc kubenswrapper[4644]: I1213 07:02:27.214908 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6cb6c55fb8-zbmvj" event={"ID":"2806b62c-cd1d-4e9f-97be-70e9129ce932","Type":"ContainerStarted","Data":"626157b4012c0e22df3e5ebb27aa54eb5449e6f0d3c4d31ccb0d5a97f0f171e8"} Dec 13 07:02:27 crc kubenswrapper[4644]: I1213 07:02:27.214967 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:27 crc kubenswrapper[4644]: I1213 07:02:27.214986 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6cb6c55fb8-zbmvj" event={"ID":"2806b62c-cd1d-4e9f-97be-70e9129ce932","Type":"ContainerStarted","Data":"26fee1566afb7e34155528731b379f9110dca9be12b887f7cc59a0294f0b94ec"} Dec 13 07:02:27 crc kubenswrapper[4644]: I1213 07:02:27.215001 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:27 crc kubenswrapper[4644]: I1213 07:02:27.217011 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4c55472a-153d-4fd9-a16d-95a17cfc69c2","Type":"ContainerStarted","Data":"60fb582ce93d53b4fe08c3e50f81c71187aa48ad1358221232b02b2544f56cf9"} Dec 13 07:02:27 crc kubenswrapper[4644]: I1213 07:02:27.217145 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 13 07:02:27 crc kubenswrapper[4644]: I1213 07:02:27.231671 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.231656215 podStartE2EDuration="3.231656215s" podCreationTimestamp="2025-12-13 07:02:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:02:27.225658988 +0000 UTC m=+1009.440609811" watchObservedRunningTime="2025-12-13 07:02:27.231656215 +0000 UTC m=+1009.446607049" Dec 13 07:02:27 crc kubenswrapper[4644]: I1213 07:02:27.248684 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6cb6c55fb8-zbmvj" podStartSLOduration=2.248666512 podStartE2EDuration="2.248666512s" podCreationTimestamp="2025-12-13 07:02:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:02:27.243277448 +0000 UTC m=+1009.458228282" watchObservedRunningTime="2025-12-13 07:02:27.248666512 +0000 UTC m=+1009.463617346" Dec 13 07:02:27 crc kubenswrapper[4644]: I1213 07:02:27.266965 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.589988851 podStartE2EDuration="5.266950344s" podCreationTimestamp="2025-12-13 07:02:22 +0000 UTC" firstStartedPulling="2025-12-13 07:02:23.206980978 +0000 UTC m=+1005.421931810" lastFinishedPulling="2025-12-13 07:02:26.88394247 +0000 UTC m=+1009.098893303" observedRunningTime="2025-12-13 07:02:27.263790402 +0000 UTC m=+1009.478741235" watchObservedRunningTime="2025-12-13 07:02:27.266950344 +0000 UTC m=+1009.481901177" Dec 13 07:02:27 crc kubenswrapper[4644]: I1213 07:02:27.282879 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:02:27 crc kubenswrapper[4644]: 
I1213 07:02:27.366047 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:02:28 crc kubenswrapper[4644]: I1213 07:02:28.770805 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:02:28 crc kubenswrapper[4644]: I1213 07:02:28.806573 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-668dddc65b-wlzwz" Dec 13 07:02:28 crc kubenswrapper[4644]: I1213 07:02:28.861427 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7dc5bbd594-tswgs"] Dec 13 07:02:29 crc kubenswrapper[4644]: I1213 07:02:29.233331 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7dc5bbd594-tswgs" podUID="fa9ae998-069d-4264-a6d3-2a8f51373524" containerName="horizon-log" containerID="cri-o://6d04b355eab221d23c1b8cfaf6910e1003738fd4205b9839ef41f687e14d4e45" gracePeriod=30 Dec 13 07:02:29 crc kubenswrapper[4644]: I1213 07:02:29.233366 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7dc5bbd594-tswgs" podUID="fa9ae998-069d-4264-a6d3-2a8f51373524" containerName="horizon" containerID="cri-o://c8e61f66d6f403376205f93aedfe0ee73826f002c15e69a024c618f920b7ed96" gracePeriod=30 Dec 13 07:02:29 crc kubenswrapper[4644]: I1213 07:02:29.512499 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:29 crc kubenswrapper[4644]: I1213 07:02:29.949029 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 13 07:02:29 crc kubenswrapper[4644]: I1213 07:02:29.997283 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 07:02:30 crc kubenswrapper[4644]: I1213 07:02:30.244557 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ea39e752-3c5e-48d8-926e-ee8eefdacf8a" containerName="cinder-scheduler" containerID="cri-o://2ea4eb5b4305902e28412848bc25f88b6f33dda8791b6e7b74f684e22a12d3b1" gracePeriod=30 Dec 13 07:02:30 crc kubenswrapper[4644]: I1213 07:02:30.244709 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ea39e752-3c5e-48d8-926e-ee8eefdacf8a" containerName="probe" containerID="cri-o://f3e24b391c47596529d605792336b95861eebe2ae6f9b4aff52abdae956bab37" gracePeriod=30 Dec 13 07:02:30 crc kubenswrapper[4644]: I1213 07:02:30.725230 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:31 crc kubenswrapper[4644]: I1213 07:02:31.252361 4644 generic.go:334] "Generic (PLEG): container finished" podID="ea39e752-3c5e-48d8-926e-ee8eefdacf8a" containerID="f3e24b391c47596529d605792336b95861eebe2ae6f9b4aff52abdae956bab37" exitCode=0 Dec 13 07:02:31 crc kubenswrapper[4644]: I1213 07:02:31.252390 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ea39e752-3c5e-48d8-926e-ee8eefdacf8a","Type":"ContainerDied","Data":"f3e24b391c47596529d605792336b95861eebe2ae6f9b4aff52abdae956bab37"} Dec 13 07:02:31 crc kubenswrapper[4644]: I1213 07:02:31.660872 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.066636 4644 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.122382 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f69459849-4h2fh"] Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.122606 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7f69459849-4h2fh" podUID="e0607341-6c2d-41db-8350-17f32d48aedc" containerName="dnsmasq-dns" containerID="cri-o://659a265edd086110a058d36a2c66fad80a6f537ca32f3bb654b6d0287f308a9b" gracePeriod=10 Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.284781 4644 generic.go:334] "Generic (PLEG): container finished" podID="e0607341-6c2d-41db-8350-17f32d48aedc" containerID="659a265edd086110a058d36a2c66fad80a6f537ca32f3bb654b6d0287f308a9b" exitCode=0 Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.284877 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f69459849-4h2fh" event={"ID":"e0607341-6c2d-41db-8350-17f32d48aedc","Type":"ContainerDied","Data":"659a265edd086110a058d36a2c66fad80a6f537ca32f3bb654b6d0287f308a9b"} Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.286722 4644 generic.go:334] "Generic (PLEG): container finished" podID="fa9ae998-069d-4264-a6d3-2a8f51373524" containerID="c8e61f66d6f403376205f93aedfe0ee73826f002c15e69a024c618f920b7ed96" exitCode=0 Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.286783 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7dc5bbd594-tswgs" event={"ID":"fa9ae998-069d-4264-a6d3-2a8f51373524","Type":"ContainerDied","Data":"c8e61f66d6f403376205f93aedfe0ee73826f002c15e69a024c618f920b7ed96"} Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.288917 4644 generic.go:334] "Generic (PLEG): container finished" podID="ea39e752-3c5e-48d8-926e-ee8eefdacf8a" containerID="2ea4eb5b4305902e28412848bc25f88b6f33dda8791b6e7b74f684e22a12d3b1" exitCode=0 Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.288954 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ea39e752-3c5e-48d8-926e-ee8eefdacf8a","Type":"ContainerDied","Data":"2ea4eb5b4305902e28412848bc25f88b6f33dda8791b6e7b74f684e22a12d3b1"} Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.421458 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.505622 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-etc-machine-id\") pod \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.505672 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4rhs\" (UniqueName: \"kubernetes.io/projected/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-kube-api-access-v4rhs\") pod \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.505733 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-combined-ca-bundle\") pod \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.505733 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ea39e752-3c5e-48d8-926e-ee8eefdacf8a" (UID: "ea39e752-3c5e-48d8-926e-ee8eefdacf8a"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.505784 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-config-data\") pod \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.505964 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-config-data-custom\") pod \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.505985 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-scripts\") pod \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\" (UID: \"ea39e752-3c5e-48d8-926e-ee8eefdacf8a\") " Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.506652 4644 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.511186 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-kube-api-access-v4rhs" (OuterVolumeSpecName: "kube-api-access-v4rhs") pod "ea39e752-3c5e-48d8-926e-ee8eefdacf8a" (UID: "ea39e752-3c5e-48d8-926e-ee8eefdacf8a"). InnerVolumeSpecName "kube-api-access-v4rhs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.512791 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ea39e752-3c5e-48d8-926e-ee8eefdacf8a" (UID: "ea39e752-3c5e-48d8-926e-ee8eefdacf8a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.515290 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-scripts" (OuterVolumeSpecName: "scripts") pod "ea39e752-3c5e-48d8-926e-ee8eefdacf8a" (UID: "ea39e752-3c5e-48d8-926e-ee8eefdacf8a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.544558 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ea39e752-3c5e-48d8-926e-ee8eefdacf8a" (UID: "ea39e752-3c5e-48d8-926e-ee8eefdacf8a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.581075 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-config-data" (OuterVolumeSpecName: "config-data") pod "ea39e752-3c5e-48d8-926e-ee8eefdacf8a" (UID: "ea39e752-3c5e-48d8-926e-ee8eefdacf8a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.608803 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4rhs\" (UniqueName: \"kubernetes.io/projected/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-kube-api-access-v4rhs\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.608829 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.608839 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.608849 4644 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.608857 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea39e752-3c5e-48d8-926e-ee8eefdacf8a-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.627599 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.709863 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-dns-svc\") pod \"e0607341-6c2d-41db-8350-17f32d48aedc\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.709913 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-ovsdbserver-sb\") pod \"e0607341-6c2d-41db-8350-17f32d48aedc\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.709970 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-ovsdbserver-nb\") pod \"e0607341-6c2d-41db-8350-17f32d48aedc\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.710070 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-config\") pod \"e0607341-6c2d-41db-8350-17f32d48aedc\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.710145 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ttrh\" (UniqueName: \"kubernetes.io/projected/e0607341-6c2d-41db-8350-17f32d48aedc-kube-api-access-5ttrh\") pod \"e0607341-6c2d-41db-8350-17f32d48aedc\" (UID: \"e0607341-6c2d-41db-8350-17f32d48aedc\") " Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.713849 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0607341-6c2d-41db-8350-17f32d48aedc-kube-api-access-5ttrh" (OuterVolumeSpecName: "kube-api-access-5ttrh") pod "e0607341-6c2d-41db-8350-17f32d48aedc" (UID: "e0607341-6c2d-41db-8350-17f32d48aedc"). InnerVolumeSpecName "kube-api-access-5ttrh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.714149 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-589fc5f8d9-v292l" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.749124 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-config" (OuterVolumeSpecName: "config") pod "e0607341-6c2d-41db-8350-17f32d48aedc" (UID: "e0607341-6c2d-41db-8350-17f32d48aedc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.754350 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e0607341-6c2d-41db-8350-17f32d48aedc" (UID: "e0607341-6c2d-41db-8350-17f32d48aedc"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.765000 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e0607341-6c2d-41db-8350-17f32d48aedc" (UID: "e0607341-6c2d-41db-8350-17f32d48aedc"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.778518 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-85586f897b-4b47l"] Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.778719 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-85586f897b-4b47l" podUID="35cbfc52-85ec-45cd-9083-d1be4bbf4a0a" containerName="neutron-api" containerID="cri-o://26ca95e03e55f22c0404cf6fee0e21bee0f31128ea90589eada6a053eed352e9" gracePeriod=30 Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.784129 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-85586f897b-4b47l" podUID="35cbfc52-85ec-45cd-9083-d1be4bbf4a0a" containerName="neutron-httpd" containerID="cri-o://05f6ca39a0251b40d12afc00d55c4ec5e4fc967000cd617e5ba52ce8830e779d" gracePeriod=30 Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.797801 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e0607341-6c2d-41db-8350-17f32d48aedc" (UID: "e0607341-6c2d-41db-8350-17f32d48aedc"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.812283 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.812313 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.812323 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ttrh\" (UniqueName: \"kubernetes.io/projected/e0607341-6c2d-41db-8350-17f32d48aedc-kube-api-access-5ttrh\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.812333 4644 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:33 crc kubenswrapper[4644]: I1213 07:02:33.812342 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e0607341-6c2d-41db-8350-17f32d48aedc-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.297003 4644 generic.go:334] "Generic (PLEG): container finished" podID="35cbfc52-85ec-45cd-9083-d1be4bbf4a0a" containerID="05f6ca39a0251b40d12afc00d55c4ec5e4fc967000cd617e5ba52ce8830e779d" exitCode=0 Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.297186 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-85586f897b-4b47l" 
event={"ID":"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a","Type":"ContainerDied","Data":"05f6ca39a0251b40d12afc00d55c4ec5e4fc967000cd617e5ba52ce8830e779d"} Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.298590 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ea39e752-3c5e-48d8-926e-ee8eefdacf8a","Type":"ContainerDied","Data":"a3c3126c099e46942498fa9f796a5caafc8935f1f518afde599c31a0a0bd71e3"} Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.298632 4644 scope.go:117] "RemoveContainer" containerID="f3e24b391c47596529d605792336b95861eebe2ae6f9b4aff52abdae956bab37" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.298650 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.303999 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f69459849-4h2fh" event={"ID":"e0607341-6c2d-41db-8350-17f32d48aedc","Type":"ContainerDied","Data":"85407098b936d4f04f86c3e42cb75f7e81fe796dafbdeff2a710d76a5bd7d0b6"} Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.304075 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f69459849-4h2fh" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.319570 4644 scope.go:117] "RemoveContainer" containerID="2ea4eb5b4305902e28412848bc25f88b6f33dda8791b6e7b74f684e22a12d3b1" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.329654 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.341790 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.367125 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f69459849-4h2fh"] Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.370968 4644 scope.go:117] "RemoveContainer" containerID="659a265edd086110a058d36a2c66fad80a6f537ca32f3bb654b6d0287f308a9b" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.377601 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f69459849-4h2fh"] Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.395107 4644 scope.go:117] "RemoveContainer" containerID="a0a1c7c45bdea2e04b596f1d3aae381e9131a7d28cfdaab4a2ec0de4f907d002" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.399992 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0607341-6c2d-41db-8350-17f32d48aedc" path="/var/lib/kubelet/pods/e0607341-6c2d-41db-8350-17f32d48aedc/volumes" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.400739 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea39e752-3c5e-48d8-926e-ee8eefdacf8a" path="/var/lib/kubelet/pods/ea39e752-3c5e-48d8-926e-ee8eefdacf8a/volumes" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.401363 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 07:02:34 crc kubenswrapper[4644]: E1213 07:02:34.402420 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0607341-6c2d-41db-8350-17f32d48aedc" containerName="dnsmasq-dns" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.402832 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0607341-6c2d-41db-8350-17f32d48aedc" containerName="dnsmasq-dns" Dec 13 07:02:34 crc 
kubenswrapper[4644]: E1213 07:02:34.402857 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea39e752-3c5e-48d8-926e-ee8eefdacf8a" containerName="cinder-scheduler" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.402866 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea39e752-3c5e-48d8-926e-ee8eefdacf8a" containerName="cinder-scheduler" Dec 13 07:02:34 crc kubenswrapper[4644]: E1213 07:02:34.402876 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88fd6885-187f-443b-a294-88293678f36b" containerName="extract-content" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.402882 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="88fd6885-187f-443b-a294-88293678f36b" containerName="extract-content" Dec 13 07:02:34 crc kubenswrapper[4644]: E1213 07:02:34.402895 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88fd6885-187f-443b-a294-88293678f36b" containerName="registry-server" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.402901 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="88fd6885-187f-443b-a294-88293678f36b" containerName="registry-server" Dec 13 07:02:34 crc kubenswrapper[4644]: E1213 07:02:34.402923 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea39e752-3c5e-48d8-926e-ee8eefdacf8a" containerName="probe" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.402928 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea39e752-3c5e-48d8-926e-ee8eefdacf8a" containerName="probe" Dec 13 07:02:34 crc kubenswrapper[4644]: E1213 07:02:34.402938 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0607341-6c2d-41db-8350-17f32d48aedc" containerName="init" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.402943 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0607341-6c2d-41db-8350-17f32d48aedc" containerName="init" Dec 13 07:02:34 crc kubenswrapper[4644]: E1213 07:02:34.402950 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88fd6885-187f-443b-a294-88293678f36b" containerName="extract-utilities" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.402955 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="88fd6885-187f-443b-a294-88293678f36b" containerName="extract-utilities" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.403186 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="88fd6885-187f-443b-a294-88293678f36b" containerName="registry-server" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.403207 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0607341-6c2d-41db-8350-17f32d48aedc" containerName="dnsmasq-dns" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.403233 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea39e752-3c5e-48d8-926e-ee8eefdacf8a" containerName="cinder-scheduler" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.403251 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea39e752-3c5e-48d8-926e-ee8eefdacf8a" containerName="probe" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.404212 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.404347 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.407179 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.423325 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.423373 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-config-data\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.423426 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9b44\" (UniqueName: \"kubernetes.io/projected/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-kube-api-access-w9b44\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.423519 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.423592 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.423616 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-scripts\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.527496 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.527537 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-config-data\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.527561 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9b44\" (UniqueName: 
\"kubernetes.io/projected/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-kube-api-access-w9b44\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.527605 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.527641 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.527659 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-scripts\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.528086 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.532217 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.532975 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-scripts\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.533478 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-config-data\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.546911 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.546946 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9b44\" (UniqueName: \"kubernetes.io/projected/a40ebe98-ffe1-4a0f-8d7c-0f803ed45669-kube-api-access-w9b44\") pod \"cinder-scheduler-0\" (UID: \"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669\") " pod="openstack/cinder-scheduler-0" Dec 13 07:02:34 crc kubenswrapper[4644]: I1213 07:02:34.723186 4644 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 13 07:02:35 crc kubenswrapper[4644]: W1213 07:02:35.135183 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda40ebe98_ffe1_4a0f_8d7c_0f803ed45669.slice/crio-9c39640c921016c82f0bc6a00c774d089b360e7e7bac03e94f00f9877bb5aa6a WatchSource:0}: Error finding container 9c39640c921016c82f0bc6a00c774d089b360e7e7bac03e94f00f9877bb5aa6a: Status 404 returned error can't find the container with id 9c39640c921016c82f0bc6a00c774d089b360e7e7bac03e94f00f9877bb5aa6a Dec 13 07:02:35 crc kubenswrapper[4644]: I1213 07:02:35.135756 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 13 07:02:35 crc kubenswrapper[4644]: I1213 07:02:35.316098 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669","Type":"ContainerStarted","Data":"9c39640c921016c82f0bc6a00c774d089b360e7e7bac03e94f00f9877bb5aa6a"} Dec 13 07:02:35 crc kubenswrapper[4644]: I1213 07:02:35.483308 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7dc5bbd594-tswgs" podUID="fa9ae998-069d-4264-a6d3-2a8f51373524" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.139:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.139:8443: connect: connection refused" Dec 13 07:02:36 crc kubenswrapper[4644]: I1213 07:02:36.304475 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 13 07:02:36 crc kubenswrapper[4644]: I1213 07:02:36.324721 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669","Type":"ContainerStarted","Data":"a689bbdc796c0a0b0951bcb55b1700931f182745fd174c5ba9835daf05048667"} Dec 13 07:02:36 crc kubenswrapper[4644]: I1213 07:02:36.324757 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a40ebe98-ffe1-4a0f-8d7c-0f803ed45669","Type":"ContainerStarted","Data":"32d8f7bfcd2ff303b6f09ee18458e0232c4063841ceb1b6185d45000f2d8689c"} Dec 13 07:02:36 crc kubenswrapper[4644]: I1213 07:02:36.348264 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.34824509 podStartE2EDuration="2.34824509s" podCreationTimestamp="2025-12-13 07:02:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:02:36.342795433 +0000 UTC m=+1018.557746265" watchObservedRunningTime="2025-12-13 07:02:36.34824509 +0000 UTC m=+1018.563195924" Dec 13 07:02:37 crc kubenswrapper[4644]: I1213 07:02:37.220265 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:37 crc kubenswrapper[4644]: I1213 07:02:37.277503 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6cb6c55fb8-zbmvj" Dec 13 07:02:37 crc kubenswrapper[4644]: I1213 07:02:37.319334 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5775886bd4-4n8bf"] Dec 13 07:02:37 crc kubenswrapper[4644]: I1213 07:02:37.319598 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5775886bd4-4n8bf" 
podUID="4d67f22e-3af3-417b-9b53-7b1072cae514" containerName="barbican-api-log" containerID="cri-o://dbdd75a6091402c8469cff64cefa24d9f2d8c8d8301e08b8f0af5d9bdce2f9b5" gracePeriod=30 Dec 13 07:02:37 crc kubenswrapper[4644]: I1213 07:02:37.319699 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5775886bd4-4n8bf" podUID="4d67f22e-3af3-417b-9b53-7b1072cae514" containerName="barbican-api" containerID="cri-o://e4efce6e4ea0e3ec75d9c5f9a594c9d9e0d1ab57eab7d469149d597c7df4142b" gracePeriod=30 Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.005858 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.015824 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-8f5479c4d-vcv6r" Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.360023 4644 generic.go:334] "Generic (PLEG): container finished" podID="4d67f22e-3af3-417b-9b53-7b1072cae514" containerID="dbdd75a6091402c8469cff64cefa24d9f2d8c8d8301e08b8f0af5d9bdce2f9b5" exitCode=143 Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.360154 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5775886bd4-4n8bf" event={"ID":"4d67f22e-3af3-417b-9b53-7b1072cae514","Type":"ContainerDied","Data":"dbdd75a6091402c8469cff64cefa24d9f2d8c8d8301e08b8f0af5d9bdce2f9b5"} Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.362660 4644 generic.go:334] "Generic (PLEG): container finished" podID="35cbfc52-85ec-45cd-9083-d1be4bbf4a0a" containerID="26ca95e03e55f22c0404cf6fee0e21bee0f31128ea90589eada6a053eed352e9" exitCode=0 Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.362749 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-85586f897b-4b47l" event={"ID":"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a","Type":"ContainerDied","Data":"26ca95e03e55f22c0404cf6fee0e21bee0f31128ea90589eada6a053eed352e9"} Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.362791 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-85586f897b-4b47l" event={"ID":"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a","Type":"ContainerDied","Data":"24467e9613178e2adf913a0a968ca78150ae8a7a92e4df4c30593de6baef2939"} Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.362804 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24467e9613178e2adf913a0a968ca78150ae8a7a92e4df4c30593de6baef2939" Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.369789 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.402397 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-combined-ca-bundle\") pod \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.402535 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-config\") pod \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.402568 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-httpd-config\") pod \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.402595 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-ovndb-tls-certs\") pod \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.402724 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-np97m\" (UniqueName: \"kubernetes.io/projected/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-kube-api-access-np97m\") pod \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\" (UID: \"35cbfc52-85ec-45cd-9083-d1be4bbf4a0a\") " Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.407544 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "35cbfc52-85ec-45cd-9083-d1be4bbf4a0a" (UID: "35cbfc52-85ec-45cd-9083-d1be4bbf4a0a"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.413576 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-kube-api-access-np97m" (OuterVolumeSpecName: "kube-api-access-np97m") pod "35cbfc52-85ec-45cd-9083-d1be4bbf4a0a" (UID: "35cbfc52-85ec-45cd-9083-d1be4bbf4a0a"). InnerVolumeSpecName "kube-api-access-np97m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.462245 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-config" (OuterVolumeSpecName: "config") pod "35cbfc52-85ec-45cd-9083-d1be4bbf4a0a" (UID: "35cbfc52-85ec-45cd-9083-d1be4bbf4a0a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.487064 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "35cbfc52-85ec-45cd-9083-d1be4bbf4a0a" (UID: "35cbfc52-85ec-45cd-9083-d1be4bbf4a0a"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.489025 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "35cbfc52-85ec-45cd-9083-d1be4bbf4a0a" (UID: "35cbfc52-85ec-45cd-9083-d1be4bbf4a0a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.504838 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.504855 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.504866 4644 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.504876 4644 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:38 crc kubenswrapper[4644]: I1213 07:02:38.504884 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-np97m\" (UniqueName: \"kubernetes.io/projected/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a-kube-api-access-np97m\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:39 crc kubenswrapper[4644]: I1213 07:02:39.369570 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-85586f897b-4b47l" Dec 13 07:02:39 crc kubenswrapper[4644]: I1213 07:02:39.394947 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-85586f897b-4b47l"] Dec 13 07:02:39 crc kubenswrapper[4644]: I1213 07:02:39.403910 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-85586f897b-4b47l"] Dec 13 07:02:39 crc kubenswrapper[4644]: I1213 07:02:39.723981 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 13 07:02:40 crc kubenswrapper[4644]: I1213 07:02:40.397183 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35cbfc52-85ec-45cd-9083-d1be4bbf4a0a" path="/var/lib/kubelet/pods/35cbfc52-85ec-45cd-9083-d1be4bbf4a0a/volumes" Dec 13 07:02:40 crc kubenswrapper[4644]: I1213 07:02:40.458748 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5775886bd4-4n8bf" podUID="4d67f22e-3af3-417b-9b53-7b1072cae514" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": read tcp 10.217.0.2:58302->10.217.0.155:9311: read: connection reset by peer" Dec 13 07:02:40 crc kubenswrapper[4644]: I1213 07:02:40.458794 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5775886bd4-4n8bf" podUID="4d67f22e-3af3-417b-9b53-7b1072cae514" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.155:9311/healthcheck\": read tcp 10.217.0.2:58316->10.217.0.155:9311: read: connection reset by peer" Dec 13 07:02:40 crc kubenswrapper[4644]: I1213 07:02:40.543126 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-84867c6db9-t2k7k" Dec 13 07:02:40 crc kubenswrapper[4644]: I1213 07:02:40.905528 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:40 crc kubenswrapper[4644]: I1213 07:02:40.972705 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-combined-ca-bundle\") pod \"4d67f22e-3af3-417b-9b53-7b1072cae514\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " Dec 13 07:02:40 crc kubenswrapper[4644]: I1213 07:02:40.972836 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d67f22e-3af3-417b-9b53-7b1072cae514-logs\") pod \"4d67f22e-3af3-417b-9b53-7b1072cae514\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " Dec 13 07:02:40 crc kubenswrapper[4644]: I1213 07:02:40.972872 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmdw9\" (UniqueName: \"kubernetes.io/projected/4d67f22e-3af3-417b-9b53-7b1072cae514-kube-api-access-tmdw9\") pod \"4d67f22e-3af3-417b-9b53-7b1072cae514\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " Dec 13 07:02:40 crc kubenswrapper[4644]: I1213 07:02:40.972895 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-config-data\") pod \"4d67f22e-3af3-417b-9b53-7b1072cae514\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " Dec 13 07:02:40 crc kubenswrapper[4644]: I1213 07:02:40.972919 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-config-data-custom\") pod \"4d67f22e-3af3-417b-9b53-7b1072cae514\" (UID: \"4d67f22e-3af3-417b-9b53-7b1072cae514\") " Dec 13 07:02:40 crc kubenswrapper[4644]: I1213 07:02:40.973359 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d67f22e-3af3-417b-9b53-7b1072cae514-logs" (OuterVolumeSpecName: "logs") pod "4d67f22e-3af3-417b-9b53-7b1072cae514" (UID: "4d67f22e-3af3-417b-9b53-7b1072cae514"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:02:40 crc kubenswrapper[4644]: I1213 07:02:40.973840 4644 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d67f22e-3af3-417b-9b53-7b1072cae514-logs\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:40 crc kubenswrapper[4644]: I1213 07:02:40.977563 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4d67f22e-3af3-417b-9b53-7b1072cae514" (UID: "4d67f22e-3af3-417b-9b53-7b1072cae514"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:40 crc kubenswrapper[4644]: I1213 07:02:40.977669 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d67f22e-3af3-417b-9b53-7b1072cae514-kube-api-access-tmdw9" (OuterVolumeSpecName: "kube-api-access-tmdw9") pod "4d67f22e-3af3-417b-9b53-7b1072cae514" (UID: "4d67f22e-3af3-417b-9b53-7b1072cae514"). InnerVolumeSpecName "kube-api-access-tmdw9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:40 crc kubenswrapper[4644]: I1213 07:02:40.995608 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4d67f22e-3af3-417b-9b53-7b1072cae514" (UID: "4d67f22e-3af3-417b-9b53-7b1072cae514"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:41 crc kubenswrapper[4644]: I1213 07:02:41.026278 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-config-data" (OuterVolumeSpecName: "config-data") pod "4d67f22e-3af3-417b-9b53-7b1072cae514" (UID: "4d67f22e-3af3-417b-9b53-7b1072cae514"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:41 crc kubenswrapper[4644]: I1213 07:02:41.075097 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:41 crc kubenswrapper[4644]: I1213 07:02:41.075364 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmdw9\" (UniqueName: \"kubernetes.io/projected/4d67f22e-3af3-417b-9b53-7b1072cae514-kube-api-access-tmdw9\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:41 crc kubenswrapper[4644]: I1213 07:02:41.075377 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:41 crc kubenswrapper[4644]: I1213 07:02:41.075386 4644 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d67f22e-3af3-417b-9b53-7b1072cae514-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:41 crc kubenswrapper[4644]: I1213 07:02:41.416669 4644 generic.go:334] "Generic (PLEG): container finished" podID="4d67f22e-3af3-417b-9b53-7b1072cae514" containerID="e4efce6e4ea0e3ec75d9c5f9a594c9d9e0d1ab57eab7d469149d597c7df4142b" exitCode=0 Dec 13 07:02:41 crc kubenswrapper[4644]: I1213 07:02:41.416716 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5775886bd4-4n8bf" event={"ID":"4d67f22e-3af3-417b-9b53-7b1072cae514","Type":"ContainerDied","Data":"e4efce6e4ea0e3ec75d9c5f9a594c9d9e0d1ab57eab7d469149d597c7df4142b"} Dec 13 07:02:41 crc kubenswrapper[4644]: I1213 07:02:41.416744 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5775886bd4-4n8bf" event={"ID":"4d67f22e-3af3-417b-9b53-7b1072cae514","Type":"ContainerDied","Data":"6b830f57f9ccfbd4d3fe3446ae281b66ece126b5746b630ef6135bf5602fb62b"} Dec 13 07:02:41 crc kubenswrapper[4644]: I1213 07:02:41.416762 4644 scope.go:117] "RemoveContainer" containerID="e4efce6e4ea0e3ec75d9c5f9a594c9d9e0d1ab57eab7d469149d597c7df4142b" Dec 13 07:02:41 crc kubenswrapper[4644]: I1213 07:02:41.416908 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5775886bd4-4n8bf" Dec 13 07:02:41 crc kubenswrapper[4644]: I1213 07:02:41.448507 4644 scope.go:117] "RemoveContainer" containerID="dbdd75a6091402c8469cff64cefa24d9f2d8c8d8301e08b8f0af5d9bdce2f9b5" Dec 13 07:02:41 crc kubenswrapper[4644]: I1213 07:02:41.453349 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5775886bd4-4n8bf"] Dec 13 07:02:41 crc kubenswrapper[4644]: I1213 07:02:41.462396 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5775886bd4-4n8bf"] Dec 13 07:02:41 crc kubenswrapper[4644]: I1213 07:02:41.471010 4644 scope.go:117] "RemoveContainer" containerID="e4efce6e4ea0e3ec75d9c5f9a594c9d9e0d1ab57eab7d469149d597c7df4142b" Dec 13 07:02:41 crc kubenswrapper[4644]: E1213 07:02:41.471469 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4efce6e4ea0e3ec75d9c5f9a594c9d9e0d1ab57eab7d469149d597c7df4142b\": container with ID starting with e4efce6e4ea0e3ec75d9c5f9a594c9d9e0d1ab57eab7d469149d597c7df4142b not found: ID does not exist" containerID="e4efce6e4ea0e3ec75d9c5f9a594c9d9e0d1ab57eab7d469149d597c7df4142b" Dec 13 07:02:41 crc kubenswrapper[4644]: I1213 07:02:41.471577 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4efce6e4ea0e3ec75d9c5f9a594c9d9e0d1ab57eab7d469149d597c7df4142b"} err="failed to get container status \"e4efce6e4ea0e3ec75d9c5f9a594c9d9e0d1ab57eab7d469149d597c7df4142b\": rpc error: code = NotFound desc = could not find container \"e4efce6e4ea0e3ec75d9c5f9a594c9d9e0d1ab57eab7d469149d597c7df4142b\": container with ID starting with e4efce6e4ea0e3ec75d9c5f9a594c9d9e0d1ab57eab7d469149d597c7df4142b not found: ID does not exist" Dec 13 07:02:41 crc kubenswrapper[4644]: I1213 07:02:41.471668 4644 scope.go:117] "RemoveContainer" containerID="dbdd75a6091402c8469cff64cefa24d9f2d8c8d8301e08b8f0af5d9bdce2f9b5" Dec 13 07:02:41 crc kubenswrapper[4644]: E1213 07:02:41.472072 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbdd75a6091402c8469cff64cefa24d9f2d8c8d8301e08b8f0af5d9bdce2f9b5\": container with ID starting with dbdd75a6091402c8469cff64cefa24d9f2d8c8d8301e08b8f0af5d9bdce2f9b5 not found: ID does not exist" containerID="dbdd75a6091402c8469cff64cefa24d9f2d8c8d8301e08b8f0af5d9bdce2f9b5" Dec 13 07:02:41 crc kubenswrapper[4644]: I1213 07:02:41.472119 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbdd75a6091402c8469cff64cefa24d9f2d8c8d8301e08b8f0af5d9bdce2f9b5"} err="failed to get container status \"dbdd75a6091402c8469cff64cefa24d9f2d8c8d8301e08b8f0af5d9bdce2f9b5\": rpc error: code = NotFound desc = could not find container \"dbdd75a6091402c8469cff64cefa24d9f2d8c8d8301e08b8f0af5d9bdce2f9b5\": container with ID starting with dbdd75a6091402c8469cff64cefa24d9f2d8c8d8301e08b8f0af5d9bdce2f9b5 not found: ID does not exist" Dec 13 07:02:42 crc kubenswrapper[4644]: I1213 07:02:42.397381 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d67f22e-3af3-417b-9b53-7b1072cae514" path="/var/lib/kubelet/pods/4d67f22e-3af3-417b-9b53-7b1072cae514/volumes" Dec 13 07:02:44 crc kubenswrapper[4644]: I1213 07:02:44.930100 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.024665 4644 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 13 07:02:45 crc kubenswrapper[4644]: E1213 07:02:45.025047 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d67f22e-3af3-417b-9b53-7b1072cae514" containerName="barbican-api" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.025068 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d67f22e-3af3-417b-9b53-7b1072cae514" containerName="barbican-api" Dec 13 07:02:45 crc kubenswrapper[4644]: E1213 07:02:45.025082 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35cbfc52-85ec-45cd-9083-d1be4bbf4a0a" containerName="neutron-httpd" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.025089 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="35cbfc52-85ec-45cd-9083-d1be4bbf4a0a" containerName="neutron-httpd" Dec 13 07:02:45 crc kubenswrapper[4644]: E1213 07:02:45.025097 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35cbfc52-85ec-45cd-9083-d1be4bbf4a0a" containerName="neutron-api" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.025103 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="35cbfc52-85ec-45cd-9083-d1be4bbf4a0a" containerName="neutron-api" Dec 13 07:02:45 crc kubenswrapper[4644]: E1213 07:02:45.025112 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d67f22e-3af3-417b-9b53-7b1072cae514" containerName="barbican-api-log" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.025118 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d67f22e-3af3-417b-9b53-7b1072cae514" containerName="barbican-api-log" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.025315 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d67f22e-3af3-417b-9b53-7b1072cae514" containerName="barbican-api" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.025333 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="35cbfc52-85ec-45cd-9083-d1be4bbf4a0a" containerName="neutron-api" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.025344 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d67f22e-3af3-417b-9b53-7b1072cae514" containerName="barbican-api-log" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.025352 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="35cbfc52-85ec-45cd-9083-d1be4bbf4a0a" containerName="neutron-httpd" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.025964 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.027750 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.027878 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-w4zvs" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.031400 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.032740 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.154934 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4ff37ee4-af22-4e5c-9386-e117905d1faf-openstack-config\") pod \"openstackclient\" (UID: \"4ff37ee4-af22-4e5c-9386-e117905d1faf\") " pod="openstack/openstackclient" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.155110 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4ff37ee4-af22-4e5c-9386-e117905d1faf-openstack-config-secret\") pod \"openstackclient\" (UID: \"4ff37ee4-af22-4e5c-9386-e117905d1faf\") " pod="openstack/openstackclient" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.155217 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgk5f\" (UniqueName: \"kubernetes.io/projected/4ff37ee4-af22-4e5c-9386-e117905d1faf-kube-api-access-kgk5f\") pod \"openstackclient\" (UID: \"4ff37ee4-af22-4e5c-9386-e117905d1faf\") " pod="openstack/openstackclient" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.155265 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ff37ee4-af22-4e5c-9386-e117905d1faf-combined-ca-bundle\") pod \"openstackclient\" (UID: \"4ff37ee4-af22-4e5c-9386-e117905d1faf\") " pod="openstack/openstackclient" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.257058 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4ff37ee4-af22-4e5c-9386-e117905d1faf-openstack-config\") pod \"openstackclient\" (UID: \"4ff37ee4-af22-4e5c-9386-e117905d1faf\") " pod="openstack/openstackclient" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.257197 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4ff37ee4-af22-4e5c-9386-e117905d1faf-openstack-config-secret\") pod \"openstackclient\" (UID: \"4ff37ee4-af22-4e5c-9386-e117905d1faf\") " pod="openstack/openstackclient" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.257283 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgk5f\" (UniqueName: \"kubernetes.io/projected/4ff37ee4-af22-4e5c-9386-e117905d1faf-kube-api-access-kgk5f\") pod \"openstackclient\" (UID: \"4ff37ee4-af22-4e5c-9386-e117905d1faf\") " pod="openstack/openstackclient" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.257321 4644 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ff37ee4-af22-4e5c-9386-e117905d1faf-combined-ca-bundle\") pod \"openstackclient\" (UID: \"4ff37ee4-af22-4e5c-9386-e117905d1faf\") " pod="openstack/openstackclient" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.257986 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4ff37ee4-af22-4e5c-9386-e117905d1faf-openstack-config\") pod \"openstackclient\" (UID: \"4ff37ee4-af22-4e5c-9386-e117905d1faf\") " pod="openstack/openstackclient" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.261238 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4ff37ee4-af22-4e5c-9386-e117905d1faf-openstack-config-secret\") pod \"openstackclient\" (UID: \"4ff37ee4-af22-4e5c-9386-e117905d1faf\") " pod="openstack/openstackclient" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.261246 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ff37ee4-af22-4e5c-9386-e117905d1faf-combined-ca-bundle\") pod \"openstackclient\" (UID: \"4ff37ee4-af22-4e5c-9386-e117905d1faf\") " pod="openstack/openstackclient" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.273347 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgk5f\" (UniqueName: \"kubernetes.io/projected/4ff37ee4-af22-4e5c-9386-e117905d1faf-kube-api-access-kgk5f\") pod \"openstackclient\" (UID: \"4ff37ee4-af22-4e5c-9386-e117905d1faf\") " pod="openstack/openstackclient" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.340276 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.483223 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7dc5bbd594-tswgs" podUID="fa9ae998-069d-4264-a6d3-2a8f51373524" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.139:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.139:8443: connect: connection refused" Dec 13 07:02:45 crc kubenswrapper[4644]: I1213 07:02:45.809055 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 13 07:02:46 crc kubenswrapper[4644]: I1213 07:02:46.456430 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"4ff37ee4-af22-4e5c-9386-e117905d1faf","Type":"ContainerStarted","Data":"ea82295249c77bccd5141dc0ab7f8ba883677a47a0c5d21149e522870966cf2e"} Dec 13 07:02:52 crc kubenswrapper[4644]: I1213 07:02:52.748229 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 13 07:02:53 crc kubenswrapper[4644]: I1213 07:02:53.653040 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:02:53 crc kubenswrapper[4644]: I1213 07:02:53.653536 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerName="ceilometer-central-agent" containerID="cri-o://836074acf177e0ef8c0e4c24d38d5c9ca89c33110cb94c672cae51417ae046d1" gracePeriod=30 Dec 13 07:02:53 crc kubenswrapper[4644]: I1213 07:02:53.653609 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerName="ceilometer-notification-agent" containerID="cri-o://7382fb24bae47ca01373fef32f5c42429ed0190de6ed32685f977088b223308c" gracePeriod=30 Dec 13 07:02:53 crc kubenswrapper[4644]: I1213 07:02:53.653562 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerName="proxy-httpd" containerID="cri-o://60fb582ce93d53b4fe08c3e50f81c71187aa48ad1358221232b02b2544f56cf9" gracePeriod=30 Dec 13 07:02:53 crc kubenswrapper[4644]: I1213 07:02:53.653638 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerName="sg-core" containerID="cri-o://e8f1b91106029dbe15d2e5eb6edb9e115d90127c1dd4aca942cc26886281019f" gracePeriod=30 Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.517471 4644 generic.go:334] "Generic (PLEG): container finished" podID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerID="60fb582ce93d53b4fe08c3e50f81c71187aa48ad1358221232b02b2544f56cf9" exitCode=0 Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.517768 4644 generic.go:334] "Generic (PLEG): container finished" podID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerID="e8f1b91106029dbe15d2e5eb6edb9e115d90127c1dd4aca942cc26886281019f" exitCode=2 Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.517794 4644 generic.go:334] "Generic (PLEG): container finished" podID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerID="836074acf177e0ef8c0e4c24d38d5c9ca89c33110cb94c672cae51417ae046d1" exitCode=0 Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.517551 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"4c55472a-153d-4fd9-a16d-95a17cfc69c2","Type":"ContainerDied","Data":"60fb582ce93d53b4fe08c3e50f81c71187aa48ad1358221232b02b2544f56cf9"} Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.517885 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4c55472a-153d-4fd9-a16d-95a17cfc69c2","Type":"ContainerDied","Data":"e8f1b91106029dbe15d2e5eb6edb9e115d90127c1dd4aca942cc26886281019f"} Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.517902 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4c55472a-153d-4fd9-a16d-95a17cfc69c2","Type":"ContainerDied","Data":"836074acf177e0ef8c0e4c24d38d5c9ca89c33110cb94c672cae51417ae046d1"} Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.519697 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"4ff37ee4-af22-4e5c-9386-e117905d1faf","Type":"ContainerStarted","Data":"2bf2d5ebbce90e01606785c8dd480ae3e35521de4cf122470b0dbcc9e20f686b"} Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.543576 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.916891854 podStartE2EDuration="10.543553117s" podCreationTimestamp="2025-12-13 07:02:44 +0000 UTC" firstStartedPulling="2025-12-13 07:02:45.822671978 +0000 UTC m=+1028.037622811" lastFinishedPulling="2025-12-13 07:02:53.449333241 +0000 UTC m=+1035.664284074" observedRunningTime="2025-12-13 07:02:54.539894838 +0000 UTC m=+1036.754845671" watchObservedRunningTime="2025-12-13 07:02:54.543553117 +0000 UTC m=+1036.758503951" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.571505 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-d6gts"] Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.572558 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-d6gts" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.589077 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-d6gts"] Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.695761 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-kwtnr"] Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.697048 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-kwtnr" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.704566 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-6797-account-create-update-4lnzn"] Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.705593 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-6797-account-create-update-4lnzn" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.707044 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.711983 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-kwtnr"] Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.717576 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-6797-account-create-update-4lnzn"] Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.737151 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84vvm\" (UniqueName: \"kubernetes.io/projected/6d25b79c-9651-4e87-acd1-85b3d955cafa-kube-api-access-84vvm\") pod \"nova-api-db-create-d6gts\" (UID: \"6d25b79c-9651-4e87-acd1-85b3d955cafa\") " pod="openstack/nova-api-db-create-d6gts" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.737506 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d25b79c-9651-4e87-acd1-85b3d955cafa-operator-scripts\") pod \"nova-api-db-create-d6gts\" (UID: \"6d25b79c-9651-4e87-acd1-85b3d955cafa\") " pod="openstack/nova-api-db-create-d6gts" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.806484 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-93c6-account-create-update-pn2qf"] Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.807951 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-93c6-account-create-update-pn2qf" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.808709 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-93c6-account-create-update-pn2qf"] Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.812870 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.839088 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84vvm\" (UniqueName: \"kubernetes.io/projected/6d25b79c-9651-4e87-acd1-85b3d955cafa-kube-api-access-84vvm\") pod \"nova-api-db-create-d6gts\" (UID: \"6d25b79c-9651-4e87-acd1-85b3d955cafa\") " pod="openstack/nova-api-db-create-d6gts" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.839146 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e60af70b-0981-447f-8697-ac2689821fb8-operator-scripts\") pod \"nova-cell0-db-create-kwtnr\" (UID: \"e60af70b-0981-447f-8697-ac2689821fb8\") " pod="openstack/nova-cell0-db-create-kwtnr" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.839175 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34cd0e67-a753-4a1b-83c5-00f478cb5dff-operator-scripts\") pod \"nova-api-6797-account-create-update-4lnzn\" (UID: \"34cd0e67-a753-4a1b-83c5-00f478cb5dff\") " pod="openstack/nova-api-6797-account-create-update-4lnzn" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.839266 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-787vm\" 
(UniqueName: \"kubernetes.io/projected/34cd0e67-a753-4a1b-83c5-00f478cb5dff-kube-api-access-787vm\") pod \"nova-api-6797-account-create-update-4lnzn\" (UID: \"34cd0e67-a753-4a1b-83c5-00f478cb5dff\") " pod="openstack/nova-api-6797-account-create-update-4lnzn" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.839605 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d25b79c-9651-4e87-acd1-85b3d955cafa-operator-scripts\") pod \"nova-api-db-create-d6gts\" (UID: \"6d25b79c-9651-4e87-acd1-85b3d955cafa\") " pod="openstack/nova-api-db-create-d6gts" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.839703 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrc6x\" (UniqueName: \"kubernetes.io/projected/e60af70b-0981-447f-8697-ac2689821fb8-kube-api-access-jrc6x\") pod \"nova-cell0-db-create-kwtnr\" (UID: \"e60af70b-0981-447f-8697-ac2689821fb8\") " pod="openstack/nova-cell0-db-create-kwtnr" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.840540 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d25b79c-9651-4e87-acd1-85b3d955cafa-operator-scripts\") pod \"nova-api-db-create-d6gts\" (UID: \"6d25b79c-9651-4e87-acd1-85b3d955cafa\") " pod="openstack/nova-api-db-create-d6gts" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.861886 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84vvm\" (UniqueName: \"kubernetes.io/projected/6d25b79c-9651-4e87-acd1-85b3d955cafa-kube-api-access-84vvm\") pod \"nova-api-db-create-d6gts\" (UID: \"6d25b79c-9651-4e87-acd1-85b3d955cafa\") " pod="openstack/nova-api-db-create-d6gts" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.885045 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-krrzw"] Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.891677 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-krrzw" Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.897259 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-krrzw"] Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.918597 4644 util.go:30] "No sandbox for pod can be found. 
Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.940926 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrc6x\" (UniqueName: \"kubernetes.io/projected/e60af70b-0981-447f-8697-ac2689821fb8-kube-api-access-jrc6x\") pod \"nova-cell0-db-create-kwtnr\" (UID: \"e60af70b-0981-447f-8697-ac2689821fb8\") " pod="openstack/nova-cell0-db-create-kwtnr"
Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.940987 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e60af70b-0981-447f-8697-ac2689821fb8-operator-scripts\") pod \"nova-cell0-db-create-kwtnr\" (UID: \"e60af70b-0981-447f-8697-ac2689821fb8\") " pod="openstack/nova-cell0-db-create-kwtnr"
Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.941014 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34cd0e67-a753-4a1b-83c5-00f478cb5dff-operator-scripts\") pod \"nova-api-6797-account-create-update-4lnzn\" (UID: \"34cd0e67-a753-4a1b-83c5-00f478cb5dff\") " pod="openstack/nova-api-6797-account-create-update-4lnzn"
Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.941061 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9bgq\" (UniqueName: \"kubernetes.io/projected/cd65e318-075f-4c1c-9b0c-9a3ada36a63e-kube-api-access-r9bgq\") pod \"nova-cell0-93c6-account-create-update-pn2qf\" (UID: \"cd65e318-075f-4c1c-9b0c-9a3ada36a63e\") " pod="openstack/nova-cell0-93c6-account-create-update-pn2qf"
Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.941104 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cd65e318-075f-4c1c-9b0c-9a3ada36a63e-operator-scripts\") pod \"nova-cell0-93c6-account-create-update-pn2qf\" (UID: \"cd65e318-075f-4c1c-9b0c-9a3ada36a63e\") " pod="openstack/nova-cell0-93c6-account-create-update-pn2qf"
Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.941139 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-787vm\" (UniqueName: \"kubernetes.io/projected/34cd0e67-a753-4a1b-83c5-00f478cb5dff-kube-api-access-787vm\") pod \"nova-api-6797-account-create-update-4lnzn\" (UID: \"34cd0e67-a753-4a1b-83c5-00f478cb5dff\") " pod="openstack/nova-api-6797-account-create-update-4lnzn"
Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.942199 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e60af70b-0981-447f-8697-ac2689821fb8-operator-scripts\") pod \"nova-cell0-db-create-kwtnr\" (UID: \"e60af70b-0981-447f-8697-ac2689821fb8\") " pod="openstack/nova-cell0-db-create-kwtnr"
Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.942274 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34cd0e67-a753-4a1b-83c5-00f478cb5dff-operator-scripts\") pod \"nova-api-6797-account-create-update-4lnzn\" (UID: \"34cd0e67-a753-4a1b-83c5-00f478cb5dff\") " pod="openstack/nova-api-6797-account-create-update-4lnzn"
Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.958262 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrc6x\" (UniqueName: \"kubernetes.io/projected/e60af70b-0981-447f-8697-ac2689821fb8-kube-api-access-jrc6x\") pod \"nova-cell0-db-create-kwtnr\" (UID: \"e60af70b-0981-447f-8697-ac2689821fb8\") " pod="openstack/nova-cell0-db-create-kwtnr"
Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.958422 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-787vm\" (UniqueName: \"kubernetes.io/projected/34cd0e67-a753-4a1b-83c5-00f478cb5dff-kube-api-access-787vm\") pod \"nova-api-6797-account-create-update-4lnzn\" (UID: \"34cd0e67-a753-4a1b-83c5-00f478cb5dff\") " pod="openstack/nova-api-6797-account-create-update-4lnzn"
Dec 13 07:02:54 crc kubenswrapper[4644]: I1213 07:02:54.968584 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.006925 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-4267-account-create-update-rmqmq"]
Dec 13 07:02:55 crc kubenswrapper[4644]: E1213 07:02:55.007680 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerName="sg-core"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.007705 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerName="sg-core"
Dec 13 07:02:55 crc kubenswrapper[4644]: E1213 07:02:55.007737 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerName="ceilometer-notification-agent"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.007745 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerName="ceilometer-notification-agent"
Dec 13 07:02:55 crc kubenswrapper[4644]: E1213 07:02:55.007772 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerName="ceilometer-central-agent"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.007787 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerName="ceilometer-central-agent"
Dec 13 07:02:55 crc kubenswrapper[4644]: E1213 07:02:55.007801 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerName="proxy-httpd"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.007808 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerName="proxy-httpd"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.008307 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerName="sg-core"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.008357 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerName="proxy-httpd"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.008382 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerName="ceilometer-notification-agent"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.008415 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerName="ceilometer-central-agent"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.009247 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4267-account-create-update-rmqmq"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.012858 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.026352 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-6797-account-create-update-4lnzn"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.032003 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-kwtnr"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.046637 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-sg-core-conf-yaml\") pod \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") "
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.046726 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-config-data\") pod \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") "
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.046795 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4c55472a-153d-4fd9-a16d-95a17cfc69c2-log-httpd\") pod \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") "
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.046845 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4c55472a-153d-4fd9-a16d-95a17cfc69c2-run-httpd\") pod \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") "
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.046889 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-combined-ca-bundle\") pod \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") "
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.046959 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-scripts\") pod \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") "
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.047038 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vprwr\" (UniqueName: \"kubernetes.io/projected/4c55472a-153d-4fd9-a16d-95a17cfc69c2-kube-api-access-vprwr\") pod \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\" (UID: \"4c55472a-153d-4fd9-a16d-95a17cfc69c2\") "
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.047433 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9bgq\" (UniqueName: \"kubernetes.io/projected/cd65e318-075f-4c1c-9b0c-9a3ada36a63e-kube-api-access-r9bgq\") pod \"nova-cell0-93c6-account-create-update-pn2qf\" (UID: \"cd65e318-075f-4c1c-9b0c-9a3ada36a63e\") " pod="openstack/nova-cell0-93c6-account-create-update-pn2qf"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.047519 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4-operator-scripts\") pod \"nova-cell1-db-create-krrzw\" (UID: \"5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4\") " pod="openstack/nova-cell1-db-create-krrzw"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.047565 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cd65e318-075f-4c1c-9b0c-9a3ada36a63e-operator-scripts\") pod \"nova-cell0-93c6-account-create-update-pn2qf\" (UID: \"cd65e318-075f-4c1c-9b0c-9a3ada36a63e\") " pod="openstack/nova-cell0-93c6-account-create-update-pn2qf"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.047839 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkq8q\" (UniqueName: \"kubernetes.io/projected/5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4-kube-api-access-rkq8q\") pod \"nova-cell1-db-create-krrzw\" (UID: \"5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4\") " pod="openstack/nova-cell1-db-create-krrzw"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.049887 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c55472a-153d-4fd9-a16d-95a17cfc69c2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4c55472a-153d-4fd9-a16d-95a17cfc69c2" (UID: "4c55472a-153d-4fd9-a16d-95a17cfc69c2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.053352 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c55472a-153d-4fd9-a16d-95a17cfc69c2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4c55472a-153d-4fd9-a16d-95a17cfc69c2" (UID: "4c55472a-153d-4fd9-a16d-95a17cfc69c2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.054602 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cd65e318-075f-4c1c-9b0c-9a3ada36a63e-operator-scripts\") pod \"nova-cell0-93c6-account-create-update-pn2qf\" (UID: \"cd65e318-075f-4c1c-9b0c-9a3ada36a63e\") " pod="openstack/nova-cell0-93c6-account-create-update-pn2qf"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.057461 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4267-account-create-update-rmqmq"]
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.061562 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-scripts" (OuterVolumeSpecName: "scripts") pod "4c55472a-153d-4fd9-a16d-95a17cfc69c2" (UID: "4c55472a-153d-4fd9-a16d-95a17cfc69c2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.068374 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9bgq\" (UniqueName: \"kubernetes.io/projected/cd65e318-075f-4c1c-9b0c-9a3ada36a63e-kube-api-access-r9bgq\") pod \"nova-cell0-93c6-account-create-update-pn2qf\" (UID: \"cd65e318-075f-4c1c-9b0c-9a3ada36a63e\") " pod="openstack/nova-cell0-93c6-account-create-update-pn2qf"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.068544 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c55472a-153d-4fd9-a16d-95a17cfc69c2-kube-api-access-vprwr" (OuterVolumeSpecName: "kube-api-access-vprwr") pod "4c55472a-153d-4fd9-a16d-95a17cfc69c2" (UID: "4c55472a-153d-4fd9-a16d-95a17cfc69c2"). InnerVolumeSpecName "kube-api-access-vprwr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.113294 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4c55472a-153d-4fd9-a16d-95a17cfc69c2" (UID: "4c55472a-153d-4fd9-a16d-95a17cfc69c2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.130770 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-93c6-account-create-update-pn2qf"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.151384 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkq8q\" (UniqueName: \"kubernetes.io/projected/5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4-kube-api-access-rkq8q\") pod \"nova-cell1-db-create-krrzw\" (UID: \"5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4\") " pod="openstack/nova-cell1-db-create-krrzw"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.151665 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xx5g\" (UniqueName: \"kubernetes.io/projected/fd0c3333-bd69-477d-9989-1c614cf6c2c6-kube-api-access-8xx5g\") pod \"nova-cell1-4267-account-create-update-rmqmq\" (UID: \"fd0c3333-bd69-477d-9989-1c614cf6c2c6\") " pod="openstack/nova-cell1-4267-account-create-update-rmqmq"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.152100 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4-operator-scripts\") pod \"nova-cell1-db-create-krrzw\" (UID: \"5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4\") " pod="openstack/nova-cell1-db-create-krrzw"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.152126 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd0c3333-bd69-477d-9989-1c614cf6c2c6-operator-scripts\") pod \"nova-cell1-4267-account-create-update-rmqmq\" (UID: \"fd0c3333-bd69-477d-9989-1c614cf6c2c6\") " pod="openstack/nova-cell1-4267-account-create-update-rmqmq"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.152603 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-scripts\") on node \"crc\" DevicePath \"\""
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.152652 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vprwr\" (UniqueName: \"kubernetes.io/projected/4c55472a-153d-4fd9-a16d-95a17cfc69c2-kube-api-access-vprwr\") on node \"crc\" DevicePath \"\""
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.152667 4644 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.152676 4644 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4c55472a-153d-4fd9-a16d-95a17cfc69c2-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.152687 4644 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4c55472a-153d-4fd9-a16d-95a17cfc69c2-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.154293 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4-operator-scripts\") pod \"nova-cell1-db-create-krrzw\" (UID: \"5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4\") " pod="openstack/nova-cell1-db-create-krrzw"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.154418 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-config-data" (OuterVolumeSpecName: "config-data") pod "4c55472a-153d-4fd9-a16d-95a17cfc69c2" (UID: "4c55472a-153d-4fd9-a16d-95a17cfc69c2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.160903 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c55472a-153d-4fd9-a16d-95a17cfc69c2" (UID: "4c55472a-153d-4fd9-a16d-95a17cfc69c2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.176666 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkq8q\" (UniqueName: \"kubernetes.io/projected/5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4-kube-api-access-rkq8q\") pod \"nova-cell1-db-create-krrzw\" (UID: \"5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4\") " pod="openstack/nova-cell1-db-create-krrzw"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.255043 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xx5g\" (UniqueName: \"kubernetes.io/projected/fd0c3333-bd69-477d-9989-1c614cf6c2c6-kube-api-access-8xx5g\") pod \"nova-cell1-4267-account-create-update-rmqmq\" (UID: \"fd0c3333-bd69-477d-9989-1c614cf6c2c6\") " pod="openstack/nova-cell1-4267-account-create-update-rmqmq"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.255209 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd0c3333-bd69-477d-9989-1c614cf6c2c6-operator-scripts\") pod \"nova-cell1-4267-account-create-update-rmqmq\" (UID: \"fd0c3333-bd69-477d-9989-1c614cf6c2c6\") " pod="openstack/nova-cell1-4267-account-create-update-rmqmq"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.255388 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-config-data\") on node \"crc\" DevicePath \"\""
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.255404 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c55472a-153d-4fd9-a16d-95a17cfc69c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.257815 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd0c3333-bd69-477d-9989-1c614cf6c2c6-operator-scripts\") pod \"nova-cell1-4267-account-create-update-rmqmq\" (UID: \"fd0c3333-bd69-477d-9989-1c614cf6c2c6\") " pod="openstack/nova-cell1-4267-account-create-update-rmqmq"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.272049 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xx5g\" (UniqueName: \"kubernetes.io/projected/fd0c3333-bd69-477d-9989-1c614cf6c2c6-kube-api-access-8xx5g\") pod \"nova-cell1-4267-account-create-update-rmqmq\" (UID: \"fd0c3333-bd69-477d-9989-1c614cf6c2c6\") " pod="openstack/nova-cell1-4267-account-create-update-rmqmq"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.282557 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-krrzw"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.353670 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4267-account-create-update-rmqmq"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.384273 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-d6gts"]
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.484009 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7dc5bbd594-tswgs" podUID="fa9ae998-069d-4264-a6d3-2a8f51373524" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.139:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.139:8443: connect: connection refused"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.484109 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7dc5bbd594-tswgs"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.545147 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-d6gts" event={"ID":"6d25b79c-9651-4e87-acd1-85b3d955cafa","Type":"ContainerStarted","Data":"380817042bf4478f7706c5e8645bded5510193ffaccd346c9b27db16c03fe717"}
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.556001 4644 generic.go:334] "Generic (PLEG): container finished" podID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" containerID="7382fb24bae47ca01373fef32f5c42429ed0190de6ed32685f977088b223308c" exitCode=0
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.557049 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4c55472a-153d-4fd9-a16d-95a17cfc69c2","Type":"ContainerDied","Data":"7382fb24bae47ca01373fef32f5c42429ed0190de6ed32685f977088b223308c"}
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.557089 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4c55472a-153d-4fd9-a16d-95a17cfc69c2","Type":"ContainerDied","Data":"1573351e0323286765420c2707dec2a1c086b5d3b5fb7013c60f22200fcfcdaa"}
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.557109 4644 scope.go:117] "RemoveContainer" containerID="60fb582ce93d53b4fe08c3e50f81c71187aa48ad1358221232b02b2544f56cf9"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.557161 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.577297 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-kwtnr"]
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.595352 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-6797-account-create-update-4lnzn"]
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.621223 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-krrzw"]
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.659690 4644 scope.go:117] "RemoveContainer" containerID="e8f1b91106029dbe15d2e5eb6edb9e115d90127c1dd4aca942cc26886281019f"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.665638 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-93c6-account-create-update-pn2qf"]
Dec 13 07:02:55 crc kubenswrapper[4644]: W1213 07:02:55.677906 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f039a4e_1c0f_49a6_bd81_3ce67dc1a6b4.slice/crio-5e15e98695331f0044c83e90aad4ef4a554483c8d47455271c9938d38eea450a WatchSource:0}: Error finding container 5e15e98695331f0044c83e90aad4ef4a554483c8d47455271c9938d38eea450a: Status 404 returned error can't find the container with id 5e15e98695331f0044c83e90aad4ef4a554483c8d47455271c9938d38eea450a
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.773362 4644 scope.go:117] "RemoveContainer" containerID="7382fb24bae47ca01373fef32f5c42429ed0190de6ed32685f977088b223308c"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.792627 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.804843 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.812243 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.814299 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.817587 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.820375 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.822812 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.825185 4644 scope.go:117] "RemoveContainer" containerID="836074acf177e0ef8c0e4c24d38d5c9ca89c33110cb94c672cae51417ae046d1"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.850374 4644 scope.go:117] "RemoveContainer" containerID="60fb582ce93d53b4fe08c3e50f81c71187aa48ad1358221232b02b2544f56cf9"
Dec 13 07:02:55 crc kubenswrapper[4644]: E1213 07:02:55.852065 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60fb582ce93d53b4fe08c3e50f81c71187aa48ad1358221232b02b2544f56cf9\": container with ID starting with 60fb582ce93d53b4fe08c3e50f81c71187aa48ad1358221232b02b2544f56cf9 not found: ID does not exist" containerID="60fb582ce93d53b4fe08c3e50f81c71187aa48ad1358221232b02b2544f56cf9"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.852110 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60fb582ce93d53b4fe08c3e50f81c71187aa48ad1358221232b02b2544f56cf9"} err="failed to get container status \"60fb582ce93d53b4fe08c3e50f81c71187aa48ad1358221232b02b2544f56cf9\": rpc error: code = NotFound desc = could not find container \"60fb582ce93d53b4fe08c3e50f81c71187aa48ad1358221232b02b2544f56cf9\": container with ID starting with 60fb582ce93d53b4fe08c3e50f81c71187aa48ad1358221232b02b2544f56cf9 not found: ID does not exist"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.852136 4644 scope.go:117] "RemoveContainer" containerID="e8f1b91106029dbe15d2e5eb6edb9e115d90127c1dd4aca942cc26886281019f"
Dec 13 07:02:55 crc kubenswrapper[4644]: E1213 07:02:55.852701 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8f1b91106029dbe15d2e5eb6edb9e115d90127c1dd4aca942cc26886281019f\": container with ID starting with e8f1b91106029dbe15d2e5eb6edb9e115d90127c1dd4aca942cc26886281019f not found: ID does not exist" containerID="e8f1b91106029dbe15d2e5eb6edb9e115d90127c1dd4aca942cc26886281019f"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.852733 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8f1b91106029dbe15d2e5eb6edb9e115d90127c1dd4aca942cc26886281019f"} err="failed to get container status \"e8f1b91106029dbe15d2e5eb6edb9e115d90127c1dd4aca942cc26886281019f\": rpc error: code = NotFound desc = could not find container \"e8f1b91106029dbe15d2e5eb6edb9e115d90127c1dd4aca942cc26886281019f\": container with ID starting with e8f1b91106029dbe15d2e5eb6edb9e115d90127c1dd4aca942cc26886281019f not found: ID does not exist"
Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.852753 4644 scope.go:117] "RemoveContainer" containerID="7382fb24bae47ca01373fef32f5c42429ed0190de6ed32685f977088b223308c"
\"7382fb24bae47ca01373fef32f5c42429ed0190de6ed32685f977088b223308c\": container with ID starting with 7382fb24bae47ca01373fef32f5c42429ed0190de6ed32685f977088b223308c not found: ID does not exist" containerID="7382fb24bae47ca01373fef32f5c42429ed0190de6ed32685f977088b223308c" Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.853093 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7382fb24bae47ca01373fef32f5c42429ed0190de6ed32685f977088b223308c"} err="failed to get container status \"7382fb24bae47ca01373fef32f5c42429ed0190de6ed32685f977088b223308c\": rpc error: code = NotFound desc = could not find container \"7382fb24bae47ca01373fef32f5c42429ed0190de6ed32685f977088b223308c\": container with ID starting with 7382fb24bae47ca01373fef32f5c42429ed0190de6ed32685f977088b223308c not found: ID does not exist" Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.853108 4644 scope.go:117] "RemoveContainer" containerID="836074acf177e0ef8c0e4c24d38d5c9ca89c33110cb94c672cae51417ae046d1" Dec 13 07:02:55 crc kubenswrapper[4644]: E1213 07:02:55.857626 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"836074acf177e0ef8c0e4c24d38d5c9ca89c33110cb94c672cae51417ae046d1\": container with ID starting with 836074acf177e0ef8c0e4c24d38d5c9ca89c33110cb94c672cae51417ae046d1 not found: ID does not exist" containerID="836074acf177e0ef8c0e4c24d38d5c9ca89c33110cb94c672cae51417ae046d1" Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.857657 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"836074acf177e0ef8c0e4c24d38d5c9ca89c33110cb94c672cae51417ae046d1"} err="failed to get container status \"836074acf177e0ef8c0e4c24d38d5c9ca89c33110cb94c672cae51417ae046d1\": rpc error: code = NotFound desc = could not find container \"836074acf177e0ef8c0e4c24d38d5c9ca89c33110cb94c672cae51417ae046d1\": container with ID starting with 836074acf177e0ef8c0e4c24d38d5c9ca89c33110cb94c672cae51417ae046d1 not found: ID does not exist" Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.881137 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:02:55 crc kubenswrapper[4644]: E1213 07:02:55.881944 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data kube-api-access-gxnhm log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[combined-ca-bundle config-data kube-api-access-gxnhm log-httpd run-httpd scripts sg-core-conf-yaml]: context canceled" pod="openstack/ceilometer-0" podUID="fb350364-28f0-4ae8-b83f-8caf99a4c9be" Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.890119 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4267-account-create-update-rmqmq"] Dec 13 07:02:55 crc kubenswrapper[4644]: W1213 07:02:55.895048 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd0c3333_bd69_477d_9989_1c614cf6c2c6.slice/crio-9063d0ce070699ca39169f045585fe99aeccc51e67aa114e4ed60738841753b8 WatchSource:0}: Error finding container 9063d0ce070699ca39169f045585fe99aeccc51e67aa114e4ed60738841753b8: Status 404 returned error can't find the container with id 9063d0ce070699ca39169f045585fe99aeccc51e67aa114e4ed60738841753b8 Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.976550 4644 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.976847 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb350364-28f0-4ae8-b83f-8caf99a4c9be-run-httpd\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.976876 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.976917 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-config-data\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.977098 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxnhm\" (UniqueName: \"kubernetes.io/projected/fb350364-28f0-4ae8-b83f-8caf99a4c9be-kube-api-access-gxnhm\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.977167 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb350364-28f0-4ae8-b83f-8caf99a4c9be-log-httpd\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:55 crc kubenswrapper[4644]: I1213 07:02:55.977193 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-scripts\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.078183 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb350364-28f0-4ae8-b83f-8caf99a4c9be-log-httpd\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.078238 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-scripts\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.078317 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.078345 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb350364-28f0-4ae8-b83f-8caf99a4c9be-run-httpd\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.078369 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.078406 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-config-data\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.078591 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxnhm\" (UniqueName: \"kubernetes.io/projected/fb350364-28f0-4ae8-b83f-8caf99a4c9be-kube-api-access-gxnhm\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.078693 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb350364-28f0-4ae8-b83f-8caf99a4c9be-log-httpd\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.078875 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb350364-28f0-4ae8-b83f-8caf99a4c9be-run-httpd\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.084702 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.084891 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-config-data\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.085294 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.086638 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-scripts\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " 
pod="openstack/ceilometer-0" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.098476 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxnhm\" (UniqueName: \"kubernetes.io/projected/fb350364-28f0-4ae8-b83f-8caf99a4c9be-kube-api-access-gxnhm\") pod \"ceilometer-0\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " pod="openstack/ceilometer-0" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.404002 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c55472a-153d-4fd9-a16d-95a17cfc69c2" path="/var/lib/kubelet/pods/4c55472a-153d-4fd9-a16d-95a17cfc69c2/volumes" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.566286 4644 generic.go:334] "Generic (PLEG): container finished" podID="e60af70b-0981-447f-8697-ac2689821fb8" containerID="d37925dc19bf2c8bbb92be40c4d3ea0031e1ae8f733bbfefbc51540c753094f1" exitCode=0 Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.566373 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-kwtnr" event={"ID":"e60af70b-0981-447f-8697-ac2689821fb8","Type":"ContainerDied","Data":"d37925dc19bf2c8bbb92be40c4d3ea0031e1ae8f733bbfefbc51540c753094f1"} Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.567835 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-kwtnr" event={"ID":"e60af70b-0981-447f-8697-ac2689821fb8","Type":"ContainerStarted","Data":"321aa3569ffbe9102edaedf2e61d422dd78cbbfaeca5fc68c674539460922b66"} Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.570003 4644 generic.go:334] "Generic (PLEG): container finished" podID="6d25b79c-9651-4e87-acd1-85b3d955cafa" containerID="0b1e6f107ee7d0752b8918402b7db77af6a4d08a78d67944df42e807018a60e6" exitCode=0 Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.570047 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-d6gts" event={"ID":"6d25b79c-9651-4e87-acd1-85b3d955cafa","Type":"ContainerDied","Data":"0b1e6f107ee7d0752b8918402b7db77af6a4d08a78d67944df42e807018a60e6"} Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.571909 4644 generic.go:334] "Generic (PLEG): container finished" podID="34cd0e67-a753-4a1b-83c5-00f478cb5dff" containerID="aed194971b5f1981fff2f7009769d2e0e792f4fe8c6a9aedee65a285b52359b1" exitCode=0 Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.571981 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6797-account-create-update-4lnzn" event={"ID":"34cd0e67-a753-4a1b-83c5-00f478cb5dff","Type":"ContainerDied","Data":"aed194971b5f1981fff2f7009769d2e0e792f4fe8c6a9aedee65a285b52359b1"} Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.572002 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6797-account-create-update-4lnzn" event={"ID":"34cd0e67-a753-4a1b-83c5-00f478cb5dff","Type":"ContainerStarted","Data":"7f0ddf3a375feacff8f020763b7bc384451016b2018c0c86a67a42289095a1b5"} Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.575160 4644 generic.go:334] "Generic (PLEG): container finished" podID="fd0c3333-bd69-477d-9989-1c614cf6c2c6" containerID="bec4b4e36b329fd7f5534c46ed9a6ef0cd00af58ec9aed18af0295ffe275c722" exitCode=0 Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.575201 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4267-account-create-update-rmqmq" 
event={"ID":"fd0c3333-bd69-477d-9989-1c614cf6c2c6","Type":"ContainerDied","Data":"bec4b4e36b329fd7f5534c46ed9a6ef0cd00af58ec9aed18af0295ffe275c722"} Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.575246 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4267-account-create-update-rmqmq" event={"ID":"fd0c3333-bd69-477d-9989-1c614cf6c2c6","Type":"ContainerStarted","Data":"9063d0ce070699ca39169f045585fe99aeccc51e67aa114e4ed60738841753b8"} Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.577385 4644 generic.go:334] "Generic (PLEG): container finished" podID="cd65e318-075f-4c1c-9b0c-9a3ada36a63e" containerID="003dbdc46051909f9b871c9826b2c83493e678e380a26c43b98e5181f61f892b" exitCode=0 Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.577467 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-93c6-account-create-update-pn2qf" event={"ID":"cd65e318-075f-4c1c-9b0c-9a3ada36a63e","Type":"ContainerDied","Data":"003dbdc46051909f9b871c9826b2c83493e678e380a26c43b98e5181f61f892b"} Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.577508 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-93c6-account-create-update-pn2qf" event={"ID":"cd65e318-075f-4c1c-9b0c-9a3ada36a63e","Type":"ContainerStarted","Data":"2fc0aeec8e48458bed978d7950d3602004dc90008bd35c3eb010b57628b68c14"} Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.579532 4644 generic.go:334] "Generic (PLEG): container finished" podID="5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4" containerID="6623c4fa9a070596fa9c295a06a77b1941eaf169d0b1d179dcb300b468b3d65e" exitCode=0 Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.579622 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-krrzw" event={"ID":"5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4","Type":"ContainerDied","Data":"6623c4fa9a070596fa9c295a06a77b1941eaf169d0b1d179dcb300b468b3d65e"} Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.579691 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-krrzw" event={"ID":"5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4","Type":"ContainerStarted","Data":"5e15e98695331f0044c83e90aad4ef4a554483c8d47455271c9938d38eea450a"} Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.579829 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.590953 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.690263 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb350364-28f0-4ae8-b83f-8caf99a4c9be-run-httpd\") pod \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.690308 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb350364-28f0-4ae8-b83f-8caf99a4c9be-log-httpd\") pod \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.690474 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-combined-ca-bundle\") pod \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.690527 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxnhm\" (UniqueName: \"kubernetes.io/projected/fb350364-28f0-4ae8-b83f-8caf99a4c9be-kube-api-access-gxnhm\") pod \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.690580 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-sg-core-conf-yaml\") pod \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.690602 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-scripts\") pod \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.690646 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-config-data\") pod \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\" (UID: \"fb350364-28f0-4ae8-b83f-8caf99a4c9be\") " Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.690893 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb350364-28f0-4ae8-b83f-8caf99a4c9be-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fb350364-28f0-4ae8-b83f-8caf99a4c9be" (UID: "fb350364-28f0-4ae8-b83f-8caf99a4c9be"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.690956 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb350364-28f0-4ae8-b83f-8caf99a4c9be-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fb350364-28f0-4ae8-b83f-8caf99a4c9be" (UID: "fb350364-28f0-4ae8-b83f-8caf99a4c9be"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.691820 4644 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb350364-28f0-4ae8-b83f-8caf99a4c9be-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.691842 4644 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fb350364-28f0-4ae8-b83f-8caf99a4c9be-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.694288 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-scripts" (OuterVolumeSpecName: "scripts") pod "fb350364-28f0-4ae8-b83f-8caf99a4c9be" (UID: "fb350364-28f0-4ae8-b83f-8caf99a4c9be"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.695649 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fb350364-28f0-4ae8-b83f-8caf99a4c9be" (UID: "fb350364-28f0-4ae8-b83f-8caf99a4c9be"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.695689 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fb350364-28f0-4ae8-b83f-8caf99a4c9be" (UID: "fb350364-28f0-4ae8-b83f-8caf99a4c9be"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.695787 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-config-data" (OuterVolumeSpecName: "config-data") pod "fb350364-28f0-4ae8-b83f-8caf99a4c9be" (UID: "fb350364-28f0-4ae8-b83f-8caf99a4c9be"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.696823 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb350364-28f0-4ae8-b83f-8caf99a4c9be-kube-api-access-gxnhm" (OuterVolumeSpecName: "kube-api-access-gxnhm") pod "fb350364-28f0-4ae8-b83f-8caf99a4c9be" (UID: "fb350364-28f0-4ae8-b83f-8caf99a4c9be"). InnerVolumeSpecName "kube-api-access-gxnhm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.793693 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.793736 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxnhm\" (UniqueName: \"kubernetes.io/projected/fb350364-28f0-4ae8-b83f-8caf99a4c9be-kube-api-access-gxnhm\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.793750 4644 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.793761 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:56 crc kubenswrapper[4644]: I1213 07:02:56.793772 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb350364-28f0-4ae8-b83f-8caf99a4c9be-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.585863 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.644681 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.659947 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.664792 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.667500 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.669908 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.672966 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.698211 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.764471 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:02:57 crc kubenswrapper[4644]: E1213 07:02:57.765255 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data kube-api-access-xpzk9 log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/ceilometer-0" podUID="eb9437a7-7b30-4bf1-af7a-e2aa96919506" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.815505 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-scripts\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.815556 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.815625 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb9437a7-7b30-4bf1-af7a-e2aa96919506-log-httpd\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.815669 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb9437a7-7b30-4bf1-af7a-e2aa96919506-run-httpd\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.815755 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.815796 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-config-data\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.815912 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpzk9\" (UniqueName: 
\"kubernetes.io/projected/eb9437a7-7b30-4bf1-af7a-e2aa96919506-kube-api-access-xpzk9\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.918687 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.918731 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-config-data\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.918813 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpzk9\" (UniqueName: \"kubernetes.io/projected/eb9437a7-7b30-4bf1-af7a-e2aa96919506-kube-api-access-xpzk9\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.918931 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-scripts\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.918976 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.919014 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb9437a7-7b30-4bf1-af7a-e2aa96919506-log-httpd\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.919040 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb9437a7-7b30-4bf1-af7a-e2aa96919506-run-httpd\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.919566 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb9437a7-7b30-4bf1-af7a-e2aa96919506-run-httpd\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.920855 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb9437a7-7b30-4bf1-af7a-e2aa96919506-log-httpd\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.931888 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.932076 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-config-data\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.933004 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-scripts\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.933951 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:57 crc kubenswrapper[4644]: I1213 07:02:57.938867 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpzk9\" (UniqueName: \"kubernetes.io/projected/eb9437a7-7b30-4bf1-af7a-e2aa96919506-kube-api-access-xpzk9\") pod \"ceilometer-0\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " pod="openstack/ceilometer-0" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.100425 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4267-account-create-update-rmqmq" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.107098 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-d6gts" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.116389 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-93c6-account-create-update-pn2qf" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.126208 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-kwtnr" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.147510 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-krrzw" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.148095 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-6797-account-create-update-4lnzn" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.225622 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cd65e318-075f-4c1c-9b0c-9a3ada36a63e-operator-scripts\") pod \"cd65e318-075f-4c1c-9b0c-9a3ada36a63e\" (UID: \"cd65e318-075f-4c1c-9b0c-9a3ada36a63e\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.225904 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4-operator-scripts\") pod \"5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4\" (UID: \"5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.225938 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34cd0e67-a753-4a1b-83c5-00f478cb5dff-operator-scripts\") pod \"34cd0e67-a753-4a1b-83c5-00f478cb5dff\" (UID: \"34cd0e67-a753-4a1b-83c5-00f478cb5dff\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.225996 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd0c3333-bd69-477d-9989-1c614cf6c2c6-operator-scripts\") pod \"fd0c3333-bd69-477d-9989-1c614cf6c2c6\" (UID: \"fd0c3333-bd69-477d-9989-1c614cf6c2c6\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.226028 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xx5g\" (UniqueName: \"kubernetes.io/projected/fd0c3333-bd69-477d-9989-1c614cf6c2c6-kube-api-access-8xx5g\") pod \"fd0c3333-bd69-477d-9989-1c614cf6c2c6\" (UID: \"fd0c3333-bd69-477d-9989-1c614cf6c2c6\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.226053 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd65e318-075f-4c1c-9b0c-9a3ada36a63e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cd65e318-075f-4c1c-9b0c-9a3ada36a63e" (UID: "cd65e318-075f-4c1c-9b0c-9a3ada36a63e"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.226131 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84vvm\" (UniqueName: \"kubernetes.io/projected/6d25b79c-9651-4e87-acd1-85b3d955cafa-kube-api-access-84vvm\") pod \"6d25b79c-9651-4e87-acd1-85b3d955cafa\" (UID: \"6d25b79c-9651-4e87-acd1-85b3d955cafa\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.226183 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkq8q\" (UniqueName: \"kubernetes.io/projected/5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4-kube-api-access-rkq8q\") pod \"5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4\" (UID: \"5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.226239 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-787vm\" (UniqueName: \"kubernetes.io/projected/34cd0e67-a753-4a1b-83c5-00f478cb5dff-kube-api-access-787vm\") pod \"34cd0e67-a753-4a1b-83c5-00f478cb5dff\" (UID: \"34cd0e67-a753-4a1b-83c5-00f478cb5dff\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.226264 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e60af70b-0981-447f-8697-ac2689821fb8-operator-scripts\") pod \"e60af70b-0981-447f-8697-ac2689821fb8\" (UID: \"e60af70b-0981-447f-8697-ac2689821fb8\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.226304 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrc6x\" (UniqueName: \"kubernetes.io/projected/e60af70b-0981-447f-8697-ac2689821fb8-kube-api-access-jrc6x\") pod \"e60af70b-0981-447f-8697-ac2689821fb8\" (UID: \"e60af70b-0981-447f-8697-ac2689821fb8\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.226326 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d25b79c-9651-4e87-acd1-85b3d955cafa-operator-scripts\") pod \"6d25b79c-9651-4e87-acd1-85b3d955cafa\" (UID: \"6d25b79c-9651-4e87-acd1-85b3d955cafa\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.226359 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4" (UID: "5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.226370 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9bgq\" (UniqueName: \"kubernetes.io/projected/cd65e318-075f-4c1c-9b0c-9a3ada36a63e-kube-api-access-r9bgq\") pod \"cd65e318-075f-4c1c-9b0c-9a3ada36a63e\" (UID: \"cd65e318-075f-4c1c-9b0c-9a3ada36a63e\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.226659 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e60af70b-0981-447f-8697-ac2689821fb8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e60af70b-0981-447f-8697-ac2689821fb8" (UID: "e60af70b-0981-447f-8697-ac2689821fb8"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.226900 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34cd0e67-a753-4a1b-83c5-00f478cb5dff-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "34cd0e67-a753-4a1b-83c5-00f478cb5dff" (UID: "34cd0e67-a753-4a1b-83c5-00f478cb5dff"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.226939 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d25b79c-9651-4e87-acd1-85b3d955cafa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6d25b79c-9651-4e87-acd1-85b3d955cafa" (UID: "6d25b79c-9651-4e87-acd1-85b3d955cafa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.227166 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd0c3333-bd69-477d-9989-1c614cf6c2c6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fd0c3333-bd69-477d-9989-1c614cf6c2c6" (UID: "fd0c3333-bd69-477d-9989-1c614cf6c2c6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.227226 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e60af70b-0981-447f-8697-ac2689821fb8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.227240 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d25b79c-9651-4e87-acd1-85b3d955cafa-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.227249 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cd65e318-075f-4c1c-9b0c-9a3ada36a63e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.227257 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.227277 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34cd0e67-a753-4a1b-83c5-00f478cb5dff-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.229585 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd0c3333-bd69-477d-9989-1c614cf6c2c6-kube-api-access-8xx5g" (OuterVolumeSpecName: "kube-api-access-8xx5g") pod "fd0c3333-bd69-477d-9989-1c614cf6c2c6" (UID: "fd0c3333-bd69-477d-9989-1c614cf6c2c6"). InnerVolumeSpecName "kube-api-access-8xx5g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.229937 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e60af70b-0981-447f-8697-ac2689821fb8-kube-api-access-jrc6x" (OuterVolumeSpecName: "kube-api-access-jrc6x") pod "e60af70b-0981-447f-8697-ac2689821fb8" (UID: "e60af70b-0981-447f-8697-ac2689821fb8"). InnerVolumeSpecName "kube-api-access-jrc6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.229961 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34cd0e67-a753-4a1b-83c5-00f478cb5dff-kube-api-access-787vm" (OuterVolumeSpecName: "kube-api-access-787vm") pod "34cd0e67-a753-4a1b-83c5-00f478cb5dff" (UID: "34cd0e67-a753-4a1b-83c5-00f478cb5dff"). InnerVolumeSpecName "kube-api-access-787vm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.230703 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d25b79c-9651-4e87-acd1-85b3d955cafa-kube-api-access-84vvm" (OuterVolumeSpecName: "kube-api-access-84vvm") pod "6d25b79c-9651-4e87-acd1-85b3d955cafa" (UID: "6d25b79c-9651-4e87-acd1-85b3d955cafa"). InnerVolumeSpecName "kube-api-access-84vvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.231202 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd65e318-075f-4c1c-9b0c-9a3ada36a63e-kube-api-access-r9bgq" (OuterVolumeSpecName: "kube-api-access-r9bgq") pod "cd65e318-075f-4c1c-9b0c-9a3ada36a63e" (UID: "cd65e318-075f-4c1c-9b0c-9a3ada36a63e"). InnerVolumeSpecName "kube-api-access-r9bgq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.231534 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4-kube-api-access-rkq8q" (OuterVolumeSpecName: "kube-api-access-rkq8q") pod "5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4" (UID: "5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4"). InnerVolumeSpecName "kube-api-access-rkq8q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.329596 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd0c3333-bd69-477d-9989-1c614cf6c2c6-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.329630 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xx5g\" (UniqueName: \"kubernetes.io/projected/fd0c3333-bd69-477d-9989-1c614cf6c2c6-kube-api-access-8xx5g\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.329642 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84vvm\" (UniqueName: \"kubernetes.io/projected/6d25b79c-9651-4e87-acd1-85b3d955cafa-kube-api-access-84vvm\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.329653 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkq8q\" (UniqueName: \"kubernetes.io/projected/5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4-kube-api-access-rkq8q\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.329662 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-787vm\" (UniqueName: \"kubernetes.io/projected/34cd0e67-a753-4a1b-83c5-00f478cb5dff-kube-api-access-787vm\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.329670 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrc6x\" (UniqueName: \"kubernetes.io/projected/e60af70b-0981-447f-8697-ac2689821fb8-kube-api-access-jrc6x\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.329678 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9bgq\" (UniqueName: \"kubernetes.io/projected/cd65e318-075f-4c1c-9b0c-9a3ada36a63e-kube-api-access-r9bgq\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.398310 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb350364-28f0-4ae8-b83f-8caf99a4c9be" path="/var/lib/kubelet/pods/fb350364-28f0-4ae8-b83f-8caf99a4c9be/volumes" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.594946 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-6797-account-create-update-4lnzn" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.594955 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6797-account-create-update-4lnzn" event={"ID":"34cd0e67-a753-4a1b-83c5-00f478cb5dff","Type":"ContainerDied","Data":"7f0ddf3a375feacff8f020763b7bc384451016b2018c0c86a67a42289095a1b5"} Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.595092 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f0ddf3a375feacff8f020763b7bc384451016b2018c0c86a67a42289095a1b5" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.596597 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4267-account-create-update-rmqmq" event={"ID":"fd0c3333-bd69-477d-9989-1c614cf6c2c6","Type":"ContainerDied","Data":"9063d0ce070699ca39169f045585fe99aeccc51e67aa114e4ed60738841753b8"} Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.596621 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9063d0ce070699ca39169f045585fe99aeccc51e67aa114e4ed60738841753b8" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.596730 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4267-account-create-update-rmqmq" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.598465 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-93c6-account-create-update-pn2qf" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.598533 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-93c6-account-create-update-pn2qf" event={"ID":"cd65e318-075f-4c1c-9b0c-9a3ada36a63e","Type":"ContainerDied","Data":"2fc0aeec8e48458bed978d7950d3602004dc90008bd35c3eb010b57628b68c14"} Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.598573 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2fc0aeec8e48458bed978d7950d3602004dc90008bd35c3eb010b57628b68c14" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.600822 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-krrzw" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.600819 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-krrzw" event={"ID":"5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4","Type":"ContainerDied","Data":"5e15e98695331f0044c83e90aad4ef4a554483c8d47455271c9938d38eea450a"} Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.600933 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e15e98695331f0044c83e90aad4ef4a554483c8d47455271c9938d38eea450a" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.602189 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-kwtnr" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.602206 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-kwtnr" event={"ID":"e60af70b-0981-447f-8697-ac2689821fb8","Type":"ContainerDied","Data":"321aa3569ffbe9102edaedf2e61d422dd78cbbfaeca5fc68c674539460922b66"} Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.602474 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="321aa3569ffbe9102edaedf2e61d422dd78cbbfaeca5fc68c674539460922b66" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.603757 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.603795 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-d6gts" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.603845 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-d6gts" event={"ID":"6d25b79c-9651-4e87-acd1-85b3d955cafa","Type":"ContainerDied","Data":"380817042bf4478f7706c5e8645bded5510193ffaccd346c9b27db16c03fe717"} Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.603865 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="380817042bf4478f7706c5e8645bded5510193ffaccd346c9b27db16c03fe717" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.611999 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.736539 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb9437a7-7b30-4bf1-af7a-e2aa96919506-run-httpd\") pod \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.736735 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-sg-core-conf-yaml\") pod \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.736835 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-combined-ca-bundle\") pod \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.736861 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-scripts\") pod \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.736909 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpzk9\" (UniqueName: \"kubernetes.io/projected/eb9437a7-7b30-4bf1-af7a-e2aa96919506-kube-api-access-xpzk9\") pod \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.736933 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb9437a7-7b30-4bf1-af7a-e2aa96919506-log-httpd\") pod \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.736950 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-config-data\") pod \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\" (UID: \"eb9437a7-7b30-4bf1-af7a-e2aa96919506\") " Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.737344 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb9437a7-7b30-4bf1-af7a-e2aa96919506-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "eb9437a7-7b30-4bf1-af7a-e2aa96919506" (UID: "eb9437a7-7b30-4bf1-af7a-e2aa96919506"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.737363 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb9437a7-7b30-4bf1-af7a-e2aa96919506-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "eb9437a7-7b30-4bf1-af7a-e2aa96919506" (UID: "eb9437a7-7b30-4bf1-af7a-e2aa96919506"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.738238 4644 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb9437a7-7b30-4bf1-af7a-e2aa96919506-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.738263 4644 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb9437a7-7b30-4bf1-af7a-e2aa96919506-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.740640 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "eb9437a7-7b30-4bf1-af7a-e2aa96919506" (UID: "eb9437a7-7b30-4bf1-af7a-e2aa96919506"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.741024 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eb9437a7-7b30-4bf1-af7a-e2aa96919506" (UID: "eb9437a7-7b30-4bf1-af7a-e2aa96919506"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.741048 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-scripts" (OuterVolumeSpecName: "scripts") pod "eb9437a7-7b30-4bf1-af7a-e2aa96919506" (UID: "eb9437a7-7b30-4bf1-af7a-e2aa96919506"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.741730 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-config-data" (OuterVolumeSpecName: "config-data") pod "eb9437a7-7b30-4bf1-af7a-e2aa96919506" (UID: "eb9437a7-7b30-4bf1-af7a-e2aa96919506"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.741801 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb9437a7-7b30-4bf1-af7a-e2aa96919506-kube-api-access-xpzk9" (OuterVolumeSpecName: "kube-api-access-xpzk9") pod "eb9437a7-7b30-4bf1-af7a-e2aa96919506" (UID: "eb9437a7-7b30-4bf1-af7a-e2aa96919506"). InnerVolumeSpecName "kube-api-access-xpzk9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.841045 4644 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.841153 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.841165 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.841173 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpzk9\" (UniqueName: \"kubernetes.io/projected/eb9437a7-7b30-4bf1-af7a-e2aa96919506-kube-api-access-xpzk9\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:58 crc kubenswrapper[4644]: I1213 07:02:58.841232 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb9437a7-7b30-4bf1-af7a-e2aa96919506-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.567136 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.611970 4644 generic.go:334] "Generic (PLEG): container finished" podID="fa9ae998-069d-4264-a6d3-2a8f51373524" containerID="6d04b355eab221d23c1b8cfaf6910e1003738fd4205b9839ef41f687e14d4e45" exitCode=137 Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.612037 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7dc5bbd594-tswgs" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.612057 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.612062 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7dc5bbd594-tswgs" event={"ID":"fa9ae998-069d-4264-a6d3-2a8f51373524","Type":"ContainerDied","Data":"6d04b355eab221d23c1b8cfaf6910e1003738fd4205b9839ef41f687e14d4e45"} Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.612101 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7dc5bbd594-tswgs" event={"ID":"fa9ae998-069d-4264-a6d3-2a8f51373524","Type":"ContainerDied","Data":"7981da8d80e72de66d4fd8b6061cce48d179cee700285041efd642663be2fb1a"} Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.612119 4644 scope.go:117] "RemoveContainer" containerID="c8e61f66d6f403376205f93aedfe0ee73826f002c15e69a024c618f920b7ed96" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.659280 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fa9ae998-069d-4264-a6d3-2a8f51373524-config-data\") pod \"fa9ae998-069d-4264-a6d3-2a8f51373524\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.659350 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hnqp\" (UniqueName: \"kubernetes.io/projected/fa9ae998-069d-4264-a6d3-2a8f51373524-kube-api-access-8hnqp\") pod \"fa9ae998-069d-4264-a6d3-2a8f51373524\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.659576 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa9ae998-069d-4264-a6d3-2a8f51373524-scripts\") pod \"fa9ae998-069d-4264-a6d3-2a8f51373524\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.659609 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-horizon-tls-certs\") pod \"fa9ae998-069d-4264-a6d3-2a8f51373524\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.659663 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa9ae998-069d-4264-a6d3-2a8f51373524-logs\") pod \"fa9ae998-069d-4264-a6d3-2a8f51373524\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.659707 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-combined-ca-bundle\") pod \"fa9ae998-069d-4264-a6d3-2a8f51373524\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.659741 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-horizon-secret-key\") pod \"fa9ae998-069d-4264-a6d3-2a8f51373524\" (UID: \"fa9ae998-069d-4264-a6d3-2a8f51373524\") " Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.663522 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.663849 4644 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa9ae998-069d-4264-a6d3-2a8f51373524-logs" (OuterVolumeSpecName: "logs") pod "fa9ae998-069d-4264-a6d3-2a8f51373524" (UID: "fa9ae998-069d-4264-a6d3-2a8f51373524"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.664840 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "fa9ae998-069d-4264-a6d3-2a8f51373524" (UID: "fa9ae998-069d-4264-a6d3-2a8f51373524"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.669277 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.669409 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa9ae998-069d-4264-a6d3-2a8f51373524-kube-api-access-8hnqp" (OuterVolumeSpecName: "kube-api-access-8hnqp") pod "fa9ae998-069d-4264-a6d3-2a8f51373524" (UID: "fa9ae998-069d-4264-a6d3-2a8f51373524"). InnerVolumeSpecName "kube-api-access-8hnqp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.678604 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:02:59 crc kubenswrapper[4644]: E1213 07:02:59.679077 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd65e318-075f-4c1c-9b0c-9a3ada36a63e" containerName="mariadb-account-create-update" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.679093 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd65e318-075f-4c1c-9b0c-9a3ada36a63e" containerName="mariadb-account-create-update" Dec 13 07:02:59 crc kubenswrapper[4644]: E1213 07:02:59.679107 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa9ae998-069d-4264-a6d3-2a8f51373524" containerName="horizon-log" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.679112 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa9ae998-069d-4264-a6d3-2a8f51373524" containerName="horizon-log" Dec 13 07:02:59 crc kubenswrapper[4644]: E1213 07:02:59.679134 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d25b79c-9651-4e87-acd1-85b3d955cafa" containerName="mariadb-database-create" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.679139 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d25b79c-9651-4e87-acd1-85b3d955cafa" containerName="mariadb-database-create" Dec 13 07:02:59 crc kubenswrapper[4644]: E1213 07:02:59.679150 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34cd0e67-a753-4a1b-83c5-00f478cb5dff" containerName="mariadb-account-create-update" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.679156 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="34cd0e67-a753-4a1b-83c5-00f478cb5dff" containerName="mariadb-account-create-update" Dec 13 07:02:59 crc kubenswrapper[4644]: E1213 07:02:59.679170 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd0c3333-bd69-477d-9989-1c614cf6c2c6" containerName="mariadb-account-create-update" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.679175 4644 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="fd0c3333-bd69-477d-9989-1c614cf6c2c6" containerName="mariadb-account-create-update" Dec 13 07:02:59 crc kubenswrapper[4644]: E1213 07:02:59.679187 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4" containerName="mariadb-database-create" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.679192 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4" containerName="mariadb-database-create" Dec 13 07:02:59 crc kubenswrapper[4644]: E1213 07:02:59.679205 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa9ae998-069d-4264-a6d3-2a8f51373524" containerName="horizon" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.679210 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa9ae998-069d-4264-a6d3-2a8f51373524" containerName="horizon" Dec 13 07:02:59 crc kubenswrapper[4644]: E1213 07:02:59.679225 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e60af70b-0981-447f-8697-ac2689821fb8" containerName="mariadb-database-create" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.679230 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="e60af70b-0981-447f-8697-ac2689821fb8" containerName="mariadb-database-create" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.679406 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="e60af70b-0981-447f-8697-ac2689821fb8" containerName="mariadb-database-create" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.679419 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd65e318-075f-4c1c-9b0c-9a3ada36a63e" containerName="mariadb-account-create-update" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.679427 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4" containerName="mariadb-database-create" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.679455 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa9ae998-069d-4264-a6d3-2a8f51373524" containerName="horizon" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.679464 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="34cd0e67-a753-4a1b-83c5-00f478cb5dff" containerName="mariadb-account-create-update" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.679474 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa9ae998-069d-4264-a6d3-2a8f51373524" containerName="horizon-log" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.679483 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d25b79c-9651-4e87-acd1-85b3d955cafa" containerName="mariadb-database-create" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.679497 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd0c3333-bd69-477d-9989-1c614cf6c2c6" containerName="mariadb-account-create-update" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.681244 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.683372 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa9ae998-069d-4264-a6d3-2a8f51373524-scripts" (OuterVolumeSpecName: "scripts") pod "fa9ae998-069d-4264-a6d3-2a8f51373524" (UID: "fa9ae998-069d-4264-a6d3-2a8f51373524"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.684767 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.684944 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.695176 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa9ae998-069d-4264-a6d3-2a8f51373524-config-data" (OuterVolumeSpecName: "config-data") pod "fa9ae998-069d-4264-a6d3-2a8f51373524" (UID: "fa9ae998-069d-4264-a6d3-2a8f51373524"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.698649 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fa9ae998-069d-4264-a6d3-2a8f51373524" (UID: "fa9ae998-069d-4264-a6d3-2a8f51373524"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.711799 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.729863 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "fa9ae998-069d-4264-a6d3-2a8f51373524" (UID: "fa9ae998-069d-4264-a6d3-2a8f51373524"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.756715 4644 scope.go:117] "RemoveContainer" containerID="6d04b355eab221d23c1b8cfaf6910e1003738fd4205b9839ef41f687e14d4e45" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.761994 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-run-httpd\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.762049 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-config-data\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.762123 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-scripts\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.762148 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-log-httpd\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.762181 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.762208 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.762352 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82hmd\" (UniqueName: \"kubernetes.io/projected/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-kube-api-access-82hmd\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.762585 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.762606 4644 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.762615 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/fa9ae998-069d-4264-a6d3-2a8f51373524-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.762625 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hnqp\" (UniqueName: \"kubernetes.io/projected/fa9ae998-069d-4264-a6d3-2a8f51373524-kube-api-access-8hnqp\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.762634 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa9ae998-069d-4264-a6d3-2a8f51373524-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.762642 4644 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa9ae998-069d-4264-a6d3-2a8f51373524-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.762650 4644 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa9ae998-069d-4264-a6d3-2a8f51373524-logs\") on node \"crc\" DevicePath \"\"" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.771997 4644 scope.go:117] "RemoveContainer" containerID="c8e61f66d6f403376205f93aedfe0ee73826f002c15e69a024c618f920b7ed96" Dec 13 07:02:59 crc kubenswrapper[4644]: E1213 07:02:59.772518 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8e61f66d6f403376205f93aedfe0ee73826f002c15e69a024c618f920b7ed96\": container with ID starting with c8e61f66d6f403376205f93aedfe0ee73826f002c15e69a024c618f920b7ed96 not found: ID does not exist" containerID="c8e61f66d6f403376205f93aedfe0ee73826f002c15e69a024c618f920b7ed96" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.772561 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8e61f66d6f403376205f93aedfe0ee73826f002c15e69a024c618f920b7ed96"} err="failed to get container status \"c8e61f66d6f403376205f93aedfe0ee73826f002c15e69a024c618f920b7ed96\": rpc error: code = NotFound desc = could not find container \"c8e61f66d6f403376205f93aedfe0ee73826f002c15e69a024c618f920b7ed96\": container with ID starting with c8e61f66d6f403376205f93aedfe0ee73826f002c15e69a024c618f920b7ed96 not found: ID does not exist" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.772588 4644 scope.go:117] "RemoveContainer" containerID="6d04b355eab221d23c1b8cfaf6910e1003738fd4205b9839ef41f687e14d4e45" Dec 13 07:02:59 crc kubenswrapper[4644]: E1213 07:02:59.773052 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d04b355eab221d23c1b8cfaf6910e1003738fd4205b9839ef41f687e14d4e45\": container with ID starting with 6d04b355eab221d23c1b8cfaf6910e1003738fd4205b9839ef41f687e14d4e45 not found: ID does not exist" containerID="6d04b355eab221d23c1b8cfaf6910e1003738fd4205b9839ef41f687e14d4e45" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.773081 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d04b355eab221d23c1b8cfaf6910e1003738fd4205b9839ef41f687e14d4e45"} err="failed to get container status \"6d04b355eab221d23c1b8cfaf6910e1003738fd4205b9839ef41f687e14d4e45\": rpc error: code = NotFound desc = could not find container \"6d04b355eab221d23c1b8cfaf6910e1003738fd4205b9839ef41f687e14d4e45\": container with ID starting with 
6d04b355eab221d23c1b8cfaf6910e1003738fd4205b9839ef41f687e14d4e45 not found: ID does not exist" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.864283 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82hmd\" (UniqueName: \"kubernetes.io/projected/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-kube-api-access-82hmd\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.864380 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-run-httpd\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.864415 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-config-data\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.864507 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-scripts\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.864536 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-log-httpd\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.864569 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.864595 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.865853 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-log-httpd\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.866131 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-run-httpd\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.868087 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.868201 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.870139 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-scripts\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.870497 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-config-data\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.880406 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82hmd\" (UniqueName: \"kubernetes.io/projected/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-kube-api-access-82hmd\") pod \"ceilometer-0\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " pod="openstack/ceilometer-0" Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.949963 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7dc5bbd594-tswgs"] Dec 13 07:02:59 crc kubenswrapper[4644]: I1213 07:02:59.957260 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7dc5bbd594-tswgs"] Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.020377 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.080583 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-mkjbp"] Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.081703 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-mkjbp" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.084727 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-9qfdb" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.084837 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.084987 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.093152 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-mkjbp"] Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.169956 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-mkjbp\" (UID: \"549d3d46-a7b0-4b33-b991-80b4eab06548\") " pod="openstack/nova-cell0-conductor-db-sync-mkjbp" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.170009 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffmn4\" (UniqueName: \"kubernetes.io/projected/549d3d46-a7b0-4b33-b991-80b4eab06548-kube-api-access-ffmn4\") pod \"nova-cell0-conductor-db-sync-mkjbp\" (UID: \"549d3d46-a7b0-4b33-b991-80b4eab06548\") " pod="openstack/nova-cell0-conductor-db-sync-mkjbp" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.170060 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-scripts\") pod \"nova-cell0-conductor-db-sync-mkjbp\" (UID: \"549d3d46-a7b0-4b33-b991-80b4eab06548\") " pod="openstack/nova-cell0-conductor-db-sync-mkjbp" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.170080 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-config-data\") pod \"nova-cell0-conductor-db-sync-mkjbp\" (UID: \"549d3d46-a7b0-4b33-b991-80b4eab06548\") " pod="openstack/nova-cell0-conductor-db-sync-mkjbp" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.272177 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-mkjbp\" (UID: \"549d3d46-a7b0-4b33-b991-80b4eab06548\") " pod="openstack/nova-cell0-conductor-db-sync-mkjbp" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.272231 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffmn4\" (UniqueName: \"kubernetes.io/projected/549d3d46-a7b0-4b33-b991-80b4eab06548-kube-api-access-ffmn4\") pod \"nova-cell0-conductor-db-sync-mkjbp\" (UID: \"549d3d46-a7b0-4b33-b991-80b4eab06548\") " pod="openstack/nova-cell0-conductor-db-sync-mkjbp" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.272300 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-scripts\") pod \"nova-cell0-conductor-db-sync-mkjbp\" (UID: 
\"549d3d46-a7b0-4b33-b991-80b4eab06548\") " pod="openstack/nova-cell0-conductor-db-sync-mkjbp" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.272318 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-config-data\") pod \"nova-cell0-conductor-db-sync-mkjbp\" (UID: \"549d3d46-a7b0-4b33-b991-80b4eab06548\") " pod="openstack/nova-cell0-conductor-db-sync-mkjbp" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.277286 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-config-data\") pod \"nova-cell0-conductor-db-sync-mkjbp\" (UID: \"549d3d46-a7b0-4b33-b991-80b4eab06548\") " pod="openstack/nova-cell0-conductor-db-sync-mkjbp" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.283736 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-scripts\") pod \"nova-cell0-conductor-db-sync-mkjbp\" (UID: \"549d3d46-a7b0-4b33-b991-80b4eab06548\") " pod="openstack/nova-cell0-conductor-db-sync-mkjbp" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.310949 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffmn4\" (UniqueName: \"kubernetes.io/projected/549d3d46-a7b0-4b33-b991-80b4eab06548-kube-api-access-ffmn4\") pod \"nova-cell0-conductor-db-sync-mkjbp\" (UID: \"549d3d46-a7b0-4b33-b991-80b4eab06548\") " pod="openstack/nova-cell0-conductor-db-sync-mkjbp" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.310949 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-mkjbp\" (UID: \"549d3d46-a7b0-4b33-b991-80b4eab06548\") " pod="openstack/nova-cell0-conductor-db-sync-mkjbp" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.397637 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb9437a7-7b30-4bf1-af7a-e2aa96919506" path="/var/lib/kubelet/pods/eb9437a7-7b30-4bf1-af7a-e2aa96919506/volumes" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.398139 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa9ae998-069d-4264-a6d3-2a8f51373524" path="/var/lib/kubelet/pods/fa9ae998-069d-4264-a6d3-2a8f51373524/volumes" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.417507 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-mkjbp" Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.467186 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:03:00 crc kubenswrapper[4644]: W1213 07:03:00.472643 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d460ab8_7ac0_4365_bd28_25d5cc56f87e.slice/crio-e1ba2e9edbe8e19b9c9a9d898a70959d5ffda83246079b6d98e9713d3bbf07fe WatchSource:0}: Error finding container e1ba2e9edbe8e19b9c9a9d898a70959d5ffda83246079b6d98e9713d3bbf07fe: Status 404 returned error can't find the container with id e1ba2e9edbe8e19b9c9a9d898a70959d5ffda83246079b6d98e9713d3bbf07fe Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.620201 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d460ab8-7ac0-4365-bd28-25d5cc56f87e","Type":"ContainerStarted","Data":"e1ba2e9edbe8e19b9c9a9d898a70959d5ffda83246079b6d98e9713d3bbf07fe"} Dec 13 07:03:00 crc kubenswrapper[4644]: I1213 07:03:00.798672 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-mkjbp"] Dec 13 07:03:00 crc kubenswrapper[4644]: W1213 07:03:00.803519 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod549d3d46_a7b0_4b33_b991_80b4eab06548.slice/crio-41b77f785d2e4c49a68a7174e2e1581055e86e4341ec00e6a93cfd5741b7afd9 WatchSource:0}: Error finding container 41b77f785d2e4c49a68a7174e2e1581055e86e4341ec00e6a93cfd5741b7afd9: Status 404 returned error can't find the container with id 41b77f785d2e4c49a68a7174e2e1581055e86e4341ec00e6a93cfd5741b7afd9 Dec 13 07:03:01 crc kubenswrapper[4644]: I1213 07:03:01.628690 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d460ab8-7ac0-4365-bd28-25d5cc56f87e","Type":"ContainerStarted","Data":"b8a46d7135ab98fb40146b969e2ec13e71c9ef2c13a896f4daf0382eaa27393d"} Dec 13 07:03:01 crc kubenswrapper[4644]: I1213 07:03:01.630152 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-mkjbp" event={"ID":"549d3d46-a7b0-4b33-b991-80b4eab06548","Type":"ContainerStarted","Data":"41b77f785d2e4c49a68a7174e2e1581055e86e4341ec00e6a93cfd5741b7afd9"} Dec 13 07:03:02 crc kubenswrapper[4644]: I1213 07:03:02.639709 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d460ab8-7ac0-4365-bd28-25d5cc56f87e","Type":"ContainerStarted","Data":"888cc0c6556f7a1816046e178f6d87a1b0a660ab595527e7dbe5052bb3832744"} Dec 13 07:03:03 crc kubenswrapper[4644]: I1213 07:03:03.657620 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d460ab8-7ac0-4365-bd28-25d5cc56f87e","Type":"ContainerStarted","Data":"bd009acdc387cc944b4eac05bd8a205c8e73c5c01157aa342fa37be954921fb6"} Dec 13 07:03:07 crc kubenswrapper[4644]: I1213 07:03:07.691672 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d460ab8-7ac0-4365-bd28-25d5cc56f87e","Type":"ContainerStarted","Data":"42b82e68785d2ca0644e753313910074f4e2847d0d34f768f4da98f716cb940e"} Dec 13 07:03:07 crc kubenswrapper[4644]: I1213 07:03:07.692605 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 13 07:03:07 crc kubenswrapper[4644]: I1213 07:03:07.693647 4644 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-mkjbp" event={"ID":"549d3d46-a7b0-4b33-b991-80b4eab06548","Type":"ContainerStarted","Data":"0534f0d864eab5a9c9bf5a3bca7456b016acf421f62786703aae5b42c371c224"} Dec 13 07:03:07 crc kubenswrapper[4644]: I1213 07:03:07.711457 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.202046236 podStartE2EDuration="8.711423982s" podCreationTimestamp="2025-12-13 07:02:59 +0000 UTC" firstStartedPulling="2025-12-13 07:03:00.476343113 +0000 UTC m=+1042.691293946" lastFinishedPulling="2025-12-13 07:03:06.985720859 +0000 UTC m=+1049.200671692" observedRunningTime="2025-12-13 07:03:07.709464767 +0000 UTC m=+1049.924415600" watchObservedRunningTime="2025-12-13 07:03:07.711423982 +0000 UTC m=+1049.926374815" Dec 13 07:03:07 crc kubenswrapper[4644]: I1213 07:03:07.729552 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-mkjbp" podStartSLOduration=1.534584325 podStartE2EDuration="7.729539327s" podCreationTimestamp="2025-12-13 07:03:00 +0000 UTC" firstStartedPulling="2025-12-13 07:03:00.805581617 +0000 UTC m=+1043.020532441" lastFinishedPulling="2025-12-13 07:03:07.00053661 +0000 UTC m=+1049.215487443" observedRunningTime="2025-12-13 07:03:07.723372961 +0000 UTC m=+1049.938323794" watchObservedRunningTime="2025-12-13 07:03:07.729539327 +0000 UTC m=+1049.944490160" Dec 13 07:03:09 crc kubenswrapper[4644]: I1213 07:03:09.719692 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:03:09 crc kubenswrapper[4644]: I1213 07:03:09.720110 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerName="ceilometer-central-agent" containerID="cri-o://b8a46d7135ab98fb40146b969e2ec13e71c9ef2c13a896f4daf0382eaa27393d" gracePeriod=30 Dec 13 07:03:09 crc kubenswrapper[4644]: I1213 07:03:09.720236 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerName="proxy-httpd" containerID="cri-o://42b82e68785d2ca0644e753313910074f4e2847d0d34f768f4da98f716cb940e" gracePeriod=30 Dec 13 07:03:09 crc kubenswrapper[4644]: I1213 07:03:09.720269 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerName="ceilometer-notification-agent" containerID="cri-o://888cc0c6556f7a1816046e178f6d87a1b0a660ab595527e7dbe5052bb3832744" gracePeriod=30 Dec 13 07:03:09 crc kubenswrapper[4644]: I1213 07:03:09.720205 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerName="sg-core" containerID="cri-o://bd009acdc387cc944b4eac05bd8a205c8e73c5c01157aa342fa37be954921fb6" gracePeriod=30 Dec 13 07:03:10 crc kubenswrapper[4644]: I1213 07:03:10.720185 4644 generic.go:334] "Generic (PLEG): container finished" podID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerID="42b82e68785d2ca0644e753313910074f4e2847d0d34f768f4da98f716cb940e" exitCode=0 Dec 13 07:03:10 crc kubenswrapper[4644]: I1213 07:03:10.720386 4644 generic.go:334] "Generic (PLEG): container finished" podID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerID="bd009acdc387cc944b4eac05bd8a205c8e73c5c01157aa342fa37be954921fb6" exitCode=2 Dec 13 07:03:10 crc 
kubenswrapper[4644]: I1213 07:03:10.720395 4644 generic.go:334] "Generic (PLEG): container finished" podID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerID="b8a46d7135ab98fb40146b969e2ec13e71c9ef2c13a896f4daf0382eaa27393d" exitCode=0 Dec 13 07:03:10 crc kubenswrapper[4644]: I1213 07:03:10.720416 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d460ab8-7ac0-4365-bd28-25d5cc56f87e","Type":"ContainerDied","Data":"42b82e68785d2ca0644e753313910074f4e2847d0d34f768f4da98f716cb940e"} Dec 13 07:03:10 crc kubenswrapper[4644]: I1213 07:03:10.720452 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d460ab8-7ac0-4365-bd28-25d5cc56f87e","Type":"ContainerDied","Data":"bd009acdc387cc944b4eac05bd8a205c8e73c5c01157aa342fa37be954921fb6"} Dec 13 07:03:10 crc kubenswrapper[4644]: I1213 07:03:10.720463 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d460ab8-7ac0-4365-bd28-25d5cc56f87e","Type":"ContainerDied","Data":"b8a46d7135ab98fb40146b969e2ec13e71c9ef2c13a896f4daf0382eaa27393d"} Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.227714 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.300475 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-scripts\") pod \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.300556 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-sg-core-conf-yaml\") pod \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.300720 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-run-httpd\") pod \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.300880 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82hmd\" (UniqueName: \"kubernetes.io/projected/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-kube-api-access-82hmd\") pod \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.300940 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-combined-ca-bundle\") pod \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.300974 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-config-data\") pod \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.301668 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-log-httpd\") pod \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\" (UID: \"1d460ab8-7ac0-4365-bd28-25d5cc56f87e\") " Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.301002 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1d460ab8-7ac0-4365-bd28-25d5cc56f87e" (UID: "1d460ab8-7ac0-4365-bd28-25d5cc56f87e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.302476 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1d460ab8-7ac0-4365-bd28-25d5cc56f87e" (UID: "1d460ab8-7ac0-4365-bd28-25d5cc56f87e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.302608 4644 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.302628 4644 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.305940 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-kube-api-access-82hmd" (OuterVolumeSpecName: "kube-api-access-82hmd") pod "1d460ab8-7ac0-4365-bd28-25d5cc56f87e" (UID: "1d460ab8-7ac0-4365-bd28-25d5cc56f87e"). InnerVolumeSpecName "kube-api-access-82hmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.306268 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-scripts" (OuterVolumeSpecName: "scripts") pod "1d460ab8-7ac0-4365-bd28-25d5cc56f87e" (UID: "1d460ab8-7ac0-4365-bd28-25d5cc56f87e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.329644 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1d460ab8-7ac0-4365-bd28-25d5cc56f87e" (UID: "1d460ab8-7ac0-4365-bd28-25d5cc56f87e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.358317 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1d460ab8-7ac0-4365-bd28-25d5cc56f87e" (UID: "1d460ab8-7ac0-4365-bd28-25d5cc56f87e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.381895 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-config-data" (OuterVolumeSpecName: "config-data") pod "1d460ab8-7ac0-4365-bd28-25d5cc56f87e" (UID: "1d460ab8-7ac0-4365-bd28-25d5cc56f87e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.404019 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82hmd\" (UniqueName: \"kubernetes.io/projected/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-kube-api-access-82hmd\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.404052 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.404063 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.404073 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.404081 4644 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1d460ab8-7ac0-4365-bd28-25d5cc56f87e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.728557 4644 generic.go:334] "Generic (PLEG): container finished" podID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerID="888cc0c6556f7a1816046e178f6d87a1b0a660ab595527e7dbe5052bb3832744" exitCode=0 Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.728613 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.728618 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d460ab8-7ac0-4365-bd28-25d5cc56f87e","Type":"ContainerDied","Data":"888cc0c6556f7a1816046e178f6d87a1b0a660ab595527e7dbe5052bb3832744"} Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.728683 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d460ab8-7ac0-4365-bd28-25d5cc56f87e","Type":"ContainerDied","Data":"e1ba2e9edbe8e19b9c9a9d898a70959d5ffda83246079b6d98e9713d3bbf07fe"} Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.728713 4644 scope.go:117] "RemoveContainer" containerID="42b82e68785d2ca0644e753313910074f4e2847d0d34f768f4da98f716cb940e" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.746577 4644 scope.go:117] "RemoveContainer" containerID="bd009acdc387cc944b4eac05bd8a205c8e73c5c01157aa342fa37be954921fb6" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.759235 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.768865 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.769205 4644 scope.go:117] "RemoveContainer" containerID="888cc0c6556f7a1816046e178f6d87a1b0a660ab595527e7dbe5052bb3832744" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.786979 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:03:11 crc kubenswrapper[4644]: E1213 07:03:11.787503 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerName="ceilometer-notification-agent" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.787538 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerName="ceilometer-notification-agent" Dec 13 07:03:11 crc kubenswrapper[4644]: E1213 07:03:11.787567 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerName="sg-core" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.787573 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerName="sg-core" Dec 13 07:03:11 crc kubenswrapper[4644]: E1213 07:03:11.787587 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerName="proxy-httpd" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.787592 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerName="proxy-httpd" Dec 13 07:03:11 crc kubenswrapper[4644]: E1213 07:03:11.787618 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerName="ceilometer-central-agent" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.787624 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerName="ceilometer-central-agent" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.787874 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerName="ceilometer-central-agent" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.787894 4644 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerName="proxy-httpd" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.787909 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerName="sg-core" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.787938 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" containerName="ceilometer-notification-agent" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.792325 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.792752 4644 scope.go:117] "RemoveContainer" containerID="b8a46d7135ab98fb40146b969e2ec13e71c9ef2c13a896f4daf0382eaa27393d" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.809935 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.811856 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.812935 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-scripts\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.812991 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.813066 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-log-httpd\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.813172 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-run-httpd\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.813269 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-config-data\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.813316 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4hlg\" (UniqueName: \"kubernetes.io/projected/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-kube-api-access-g4hlg\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.813532 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.819847 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.821648 4644 scope.go:117] "RemoveContainer" containerID="42b82e68785d2ca0644e753313910074f4e2847d0d34f768f4da98f716cb940e" Dec 13 07:03:11 crc kubenswrapper[4644]: E1213 07:03:11.823050 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42b82e68785d2ca0644e753313910074f4e2847d0d34f768f4da98f716cb940e\": container with ID starting with 42b82e68785d2ca0644e753313910074f4e2847d0d34f768f4da98f716cb940e not found: ID does not exist" containerID="42b82e68785d2ca0644e753313910074f4e2847d0d34f768f4da98f716cb940e" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.823084 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42b82e68785d2ca0644e753313910074f4e2847d0d34f768f4da98f716cb940e"} err="failed to get container status \"42b82e68785d2ca0644e753313910074f4e2847d0d34f768f4da98f716cb940e\": rpc error: code = NotFound desc = could not find container \"42b82e68785d2ca0644e753313910074f4e2847d0d34f768f4da98f716cb940e\": container with ID starting with 42b82e68785d2ca0644e753313910074f4e2847d0d34f768f4da98f716cb940e not found: ID does not exist" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.823110 4644 scope.go:117] "RemoveContainer" containerID="bd009acdc387cc944b4eac05bd8a205c8e73c5c01157aa342fa37be954921fb6" Dec 13 07:03:11 crc kubenswrapper[4644]: E1213 07:03:11.824491 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd009acdc387cc944b4eac05bd8a205c8e73c5c01157aa342fa37be954921fb6\": container with ID starting with bd009acdc387cc944b4eac05bd8a205c8e73c5c01157aa342fa37be954921fb6 not found: ID does not exist" containerID="bd009acdc387cc944b4eac05bd8a205c8e73c5c01157aa342fa37be954921fb6" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.824515 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd009acdc387cc944b4eac05bd8a205c8e73c5c01157aa342fa37be954921fb6"} err="failed to get container status \"bd009acdc387cc944b4eac05bd8a205c8e73c5c01157aa342fa37be954921fb6\": rpc error: code = NotFound desc = could not find container \"bd009acdc387cc944b4eac05bd8a205c8e73c5c01157aa342fa37be954921fb6\": container with ID starting with bd009acdc387cc944b4eac05bd8a205c8e73c5c01157aa342fa37be954921fb6 not found: ID does not exist" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.824536 4644 scope.go:117] "RemoveContainer" containerID="888cc0c6556f7a1816046e178f6d87a1b0a660ab595527e7dbe5052bb3832744" Dec 13 07:03:11 crc kubenswrapper[4644]: E1213 07:03:11.825266 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"888cc0c6556f7a1816046e178f6d87a1b0a660ab595527e7dbe5052bb3832744\": container with ID starting with 888cc0c6556f7a1816046e178f6d87a1b0a660ab595527e7dbe5052bb3832744 not found: ID does not exist" containerID="888cc0c6556f7a1816046e178f6d87a1b0a660ab595527e7dbe5052bb3832744" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 
07:03:11.825314 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"888cc0c6556f7a1816046e178f6d87a1b0a660ab595527e7dbe5052bb3832744"} err="failed to get container status \"888cc0c6556f7a1816046e178f6d87a1b0a660ab595527e7dbe5052bb3832744\": rpc error: code = NotFound desc = could not find container \"888cc0c6556f7a1816046e178f6d87a1b0a660ab595527e7dbe5052bb3832744\": container with ID starting with 888cc0c6556f7a1816046e178f6d87a1b0a660ab595527e7dbe5052bb3832744 not found: ID does not exist" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.825333 4644 scope.go:117] "RemoveContainer" containerID="b8a46d7135ab98fb40146b969e2ec13e71c9ef2c13a896f4daf0382eaa27393d" Dec 13 07:03:11 crc kubenswrapper[4644]: E1213 07:03:11.825768 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8a46d7135ab98fb40146b969e2ec13e71c9ef2c13a896f4daf0382eaa27393d\": container with ID starting with b8a46d7135ab98fb40146b969e2ec13e71c9ef2c13a896f4daf0382eaa27393d not found: ID does not exist" containerID="b8a46d7135ab98fb40146b969e2ec13e71c9ef2c13a896f4daf0382eaa27393d" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.825819 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8a46d7135ab98fb40146b969e2ec13e71c9ef2c13a896f4daf0382eaa27393d"} err="failed to get container status \"b8a46d7135ab98fb40146b969e2ec13e71c9ef2c13a896f4daf0382eaa27393d\": rpc error: code = NotFound desc = could not find container \"b8a46d7135ab98fb40146b969e2ec13e71c9ef2c13a896f4daf0382eaa27393d\": container with ID starting with b8a46d7135ab98fb40146b969e2ec13e71c9ef2c13a896f4daf0382eaa27393d not found: ID does not exist" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.934117 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-config-data\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.934206 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4hlg\" (UniqueName: \"kubernetes.io/projected/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-kube-api-access-g4hlg\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.934375 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.934559 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-scripts\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.934594 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " 
pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.934654 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-log-httpd\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.934730 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-run-httpd\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.935140 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-run-httpd\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.935353 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-log-httpd\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.939333 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.949313 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-config-data\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.952920 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.961980 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-scripts\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:11 crc kubenswrapper[4644]: I1213 07:03:11.976980 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4hlg\" (UniqueName: \"kubernetes.io/projected/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-kube-api-access-g4hlg\") pod \"ceilometer-0\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") " pod="openstack/ceilometer-0" Dec 13 07:03:12 crc kubenswrapper[4644]: I1213 07:03:12.121964 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:03:12 crc kubenswrapper[4644]: I1213 07:03:12.399153 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d460ab8-7ac0-4365-bd28-25d5cc56f87e" path="/var/lib/kubelet/pods/1d460ab8-7ac0-4365-bd28-25d5cc56f87e/volumes" Dec 13 07:03:12 crc kubenswrapper[4644]: I1213 07:03:12.502213 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:03:12 crc kubenswrapper[4644]: W1213 07:03:12.508592 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8f4fd88_62a2_4ff6_a724_5cfc2e15fb49.slice/crio-c3d44277d7246df484249428330b46d150179bd2ac142cf5ad147b63eaf9d2e4 WatchSource:0}: Error finding container c3d44277d7246df484249428330b46d150179bd2ac142cf5ad147b63eaf9d2e4: Status 404 returned error can't find the container with id c3d44277d7246df484249428330b46d150179bd2ac142cf5ad147b63eaf9d2e4 Dec 13 07:03:12 crc kubenswrapper[4644]: I1213 07:03:12.737549 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49","Type":"ContainerStarted","Data":"c3d44277d7246df484249428330b46d150179bd2ac142cf5ad147b63eaf9d2e4"} Dec 13 07:03:13 crc kubenswrapper[4644]: I1213 07:03:13.746802 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49","Type":"ContainerStarted","Data":"faf7135bdb68f28b30495758634b09f474e7c43de085aa43507a3dac29201f69"} Dec 13 07:03:13 crc kubenswrapper[4644]: I1213 07:03:13.749160 4644 generic.go:334] "Generic (PLEG): container finished" podID="549d3d46-a7b0-4b33-b991-80b4eab06548" containerID="0534f0d864eab5a9c9bf5a3bca7456b016acf421f62786703aae5b42c371c224" exitCode=0 Dec 13 07:03:13 crc kubenswrapper[4644]: I1213 07:03:13.749190 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-mkjbp" event={"ID":"549d3d46-a7b0-4b33-b991-80b4eab06548","Type":"ContainerDied","Data":"0534f0d864eab5a9c9bf5a3bca7456b016acf421f62786703aae5b42c371c224"} Dec 13 07:03:14 crc kubenswrapper[4644]: I1213 07:03:14.759593 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49","Type":"ContainerStarted","Data":"927e0b2ce482a998fa79cb7c8fa50cf94d7d42fe52fd04f3ebd7ec1b0d08277a"} Dec 13 07:03:14 crc kubenswrapper[4644]: I1213 07:03:14.759873 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49","Type":"ContainerStarted","Data":"d5fda868f9ffecbb4c063085543de14ab89960fea6fa63e0042427cb9f72369f"} Dec 13 07:03:14 crc kubenswrapper[4644]: I1213 07:03:14.998485 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-mkjbp" Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.190488 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-config-data\") pod \"549d3d46-a7b0-4b33-b991-80b4eab06548\" (UID: \"549d3d46-a7b0-4b33-b991-80b4eab06548\") " Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.190552 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-scripts\") pod \"549d3d46-a7b0-4b33-b991-80b4eab06548\" (UID: \"549d3d46-a7b0-4b33-b991-80b4eab06548\") " Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.190581 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-combined-ca-bundle\") pod \"549d3d46-a7b0-4b33-b991-80b4eab06548\" (UID: \"549d3d46-a7b0-4b33-b991-80b4eab06548\") " Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.190692 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffmn4\" (UniqueName: \"kubernetes.io/projected/549d3d46-a7b0-4b33-b991-80b4eab06548-kube-api-access-ffmn4\") pod \"549d3d46-a7b0-4b33-b991-80b4eab06548\" (UID: \"549d3d46-a7b0-4b33-b991-80b4eab06548\") " Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.205203 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/549d3d46-a7b0-4b33-b991-80b4eab06548-kube-api-access-ffmn4" (OuterVolumeSpecName: "kube-api-access-ffmn4") pod "549d3d46-a7b0-4b33-b991-80b4eab06548" (UID: "549d3d46-a7b0-4b33-b991-80b4eab06548"). InnerVolumeSpecName "kube-api-access-ffmn4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.208171 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-scripts" (OuterVolumeSpecName: "scripts") pod "549d3d46-a7b0-4b33-b991-80b4eab06548" (UID: "549d3d46-a7b0-4b33-b991-80b4eab06548"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.211834 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "549d3d46-a7b0-4b33-b991-80b4eab06548" (UID: "549d3d46-a7b0-4b33-b991-80b4eab06548"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.212317 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-config-data" (OuterVolumeSpecName: "config-data") pod "549d3d46-a7b0-4b33-b991-80b4eab06548" (UID: "549d3d46-a7b0-4b33-b991-80b4eab06548"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.294287 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.294323 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.294334 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/549d3d46-a7b0-4b33-b991-80b4eab06548-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.294349 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffmn4\" (UniqueName: \"kubernetes.io/projected/549d3d46-a7b0-4b33-b991-80b4eab06548-kube-api-access-ffmn4\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.767670 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-mkjbp" event={"ID":"549d3d46-a7b0-4b33-b991-80b4eab06548","Type":"ContainerDied","Data":"41b77f785d2e4c49a68a7174e2e1581055e86e4341ec00e6a93cfd5741b7afd9"} Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.767878 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41b77f785d2e4c49a68a7174e2e1581055e86e4341ec00e6a93cfd5741b7afd9" Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.767917 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-mkjbp" Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.859293 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 13 07:03:15 crc kubenswrapper[4644]: E1213 07:03:15.863685 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="549d3d46-a7b0-4b33-b991-80b4eab06548" containerName="nova-cell0-conductor-db-sync" Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.863718 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="549d3d46-a7b0-4b33-b991-80b4eab06548" containerName="nova-cell0-conductor-db-sync" Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.864174 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="549d3d46-a7b0-4b33-b991-80b4eab06548" containerName="nova-cell0-conductor-db-sync" Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.866113 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.877664 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.878242 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-9qfdb" Dec 13 07:03:15 crc kubenswrapper[4644]: I1213 07:03:15.885415 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 13 07:03:16 crc kubenswrapper[4644]: I1213 07:03:16.011392 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qx8ln\" (UniqueName: \"kubernetes.io/projected/087d15a1-0ac0-4c1b-a301-280be2b50018-kube-api-access-qx8ln\") pod \"nova-cell0-conductor-0\" (UID: \"087d15a1-0ac0-4c1b-a301-280be2b50018\") " pod="openstack/nova-cell0-conductor-0" Dec 13 07:03:16 crc kubenswrapper[4644]: I1213 07:03:16.011786 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/087d15a1-0ac0-4c1b-a301-280be2b50018-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"087d15a1-0ac0-4c1b-a301-280be2b50018\") " pod="openstack/nova-cell0-conductor-0" Dec 13 07:03:16 crc kubenswrapper[4644]: I1213 07:03:16.011943 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/087d15a1-0ac0-4c1b-a301-280be2b50018-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"087d15a1-0ac0-4c1b-a301-280be2b50018\") " pod="openstack/nova-cell0-conductor-0" Dec 13 07:03:16 crc kubenswrapper[4644]: I1213 07:03:16.115234 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qx8ln\" (UniqueName: \"kubernetes.io/projected/087d15a1-0ac0-4c1b-a301-280be2b50018-kube-api-access-qx8ln\") pod \"nova-cell0-conductor-0\" (UID: \"087d15a1-0ac0-4c1b-a301-280be2b50018\") " pod="openstack/nova-cell0-conductor-0" Dec 13 07:03:16 crc kubenswrapper[4644]: I1213 07:03:16.115289 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/087d15a1-0ac0-4c1b-a301-280be2b50018-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"087d15a1-0ac0-4c1b-a301-280be2b50018\") " pod="openstack/nova-cell0-conductor-0" Dec 13 07:03:16 crc kubenswrapper[4644]: I1213 07:03:16.115318 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/087d15a1-0ac0-4c1b-a301-280be2b50018-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"087d15a1-0ac0-4c1b-a301-280be2b50018\") " pod="openstack/nova-cell0-conductor-0" Dec 13 07:03:16 crc kubenswrapper[4644]: I1213 07:03:16.120330 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/087d15a1-0ac0-4c1b-a301-280be2b50018-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"087d15a1-0ac0-4c1b-a301-280be2b50018\") " pod="openstack/nova-cell0-conductor-0" Dec 13 07:03:16 crc kubenswrapper[4644]: I1213 07:03:16.126223 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/087d15a1-0ac0-4c1b-a301-280be2b50018-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"087d15a1-0ac0-4c1b-a301-280be2b50018\") " pod="openstack/nova-cell0-conductor-0" Dec 13 07:03:16 crc kubenswrapper[4644]: I1213 07:03:16.133184 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qx8ln\" (UniqueName: \"kubernetes.io/projected/087d15a1-0ac0-4c1b-a301-280be2b50018-kube-api-access-qx8ln\") pod \"nova-cell0-conductor-0\" (UID: \"087d15a1-0ac0-4c1b-a301-280be2b50018\") " pod="openstack/nova-cell0-conductor-0" Dec 13 07:03:16 crc kubenswrapper[4644]: I1213 07:03:16.193992 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 13 07:03:16 crc kubenswrapper[4644]: I1213 07:03:16.656581 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 13 07:03:16 crc kubenswrapper[4644]: I1213 07:03:16.776357 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49","Type":"ContainerStarted","Data":"8eecf73f846b26c0de9e698d7a3018a15783ba62b371b0531cbee39d71c86136"} Dec 13 07:03:16 crc kubenswrapper[4644]: I1213 07:03:16.777314 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 13 07:03:16 crc kubenswrapper[4644]: I1213 07:03:16.777494 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"087d15a1-0ac0-4c1b-a301-280be2b50018","Type":"ContainerStarted","Data":"79e3cf5e04b5b083f045af6aa939c101e883f38055c4441cf059bd606c23c4c0"} Dec 13 07:03:16 crc kubenswrapper[4644]: I1213 07:03:16.796901 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.51767522 podStartE2EDuration="5.796884431s" podCreationTimestamp="2025-12-13 07:03:11 +0000 UTC" firstStartedPulling="2025-12-13 07:03:12.510489064 +0000 UTC m=+1054.725439898" lastFinishedPulling="2025-12-13 07:03:15.789698277 +0000 UTC m=+1058.004649109" observedRunningTime="2025-12-13 07:03:16.794424175 +0000 UTC m=+1059.009375008" watchObservedRunningTime="2025-12-13 07:03:16.796884431 +0000 UTC m=+1059.011835264" Dec 13 07:03:17 crc kubenswrapper[4644]: I1213 07:03:17.785957 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"087d15a1-0ac0-4c1b-a301-280be2b50018","Type":"ContainerStarted","Data":"f84a4f6794461c49f59dafbcf01e6f52038009e1e0ea0729008a1544c7b3b2d8"} Dec 13 07:03:17 crc kubenswrapper[4644]: I1213 07:03:17.786183 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 13 07:03:17 crc kubenswrapper[4644]: I1213 07:03:17.807531 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.807512268 podStartE2EDuration="2.807512268s" podCreationTimestamp="2025-12-13 07:03:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:03:17.801510302 +0000 UTC m=+1060.016461135" watchObservedRunningTime="2025-12-13 07:03:17.807512268 +0000 UTC m=+1060.022463102" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.218328 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.619611 4644 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/nova-cell0-cell-mapping-z4btn"] Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.620822 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-z4btn" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.622680 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.622892 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.628016 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-z4btn"] Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.747764 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-z4btn\" (UID: \"5c88cb8b-6880-4be0-a4bc-467783f0f752\") " pod="openstack/nova-cell0-cell-mapping-z4btn" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.748032 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dnn4\" (UniqueName: \"kubernetes.io/projected/5c88cb8b-6880-4be0-a4bc-467783f0f752-kube-api-access-4dnn4\") pod \"nova-cell0-cell-mapping-z4btn\" (UID: \"5c88cb8b-6880-4be0-a4bc-467783f0f752\") " pod="openstack/nova-cell0-cell-mapping-z4btn" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.748151 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-config-data\") pod \"nova-cell0-cell-mapping-z4btn\" (UID: \"5c88cb8b-6880-4be0-a4bc-467783f0f752\") " pod="openstack/nova-cell0-cell-mapping-z4btn" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.748173 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-scripts\") pod \"nova-cell0-cell-mapping-z4btn\" (UID: \"5c88cb8b-6880-4be0-a4bc-467783f0f752\") " pod="openstack/nova-cell0-cell-mapping-z4btn" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.778135 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.779453 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.780853 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.793693 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.809620 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.810669 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.818927 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.835019 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.849423 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dnn4\" (UniqueName: \"kubernetes.io/projected/5c88cb8b-6880-4be0-a4bc-467783f0f752-kube-api-access-4dnn4\") pod \"nova-cell0-cell-mapping-z4btn\" (UID: \"5c88cb8b-6880-4be0-a4bc-467783f0f752\") " pod="openstack/nova-cell0-cell-mapping-z4btn" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.849475 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-logs\") pod \"nova-api-0\" (UID: \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\") " pod="openstack/nova-api-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.849563 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\") " pod="openstack/nova-api-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.849616 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcj9n\" (UniqueName: \"kubernetes.io/projected/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-kube-api-access-tcj9n\") pod \"nova-scheduler-0\" (UID: \"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.849644 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-config-data\") pod \"nova-scheduler-0\" (UID: \"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.849690 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pztpd\" (UniqueName: \"kubernetes.io/projected/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-kube-api-access-pztpd\") pod \"nova-api-0\" (UID: \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\") " pod="openstack/nova-api-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.849741 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-config-data\") pod \"nova-cell0-cell-mapping-z4btn\" (UID: \"5c88cb8b-6880-4be0-a4bc-467783f0f752\") " pod="openstack/nova-cell0-cell-mapping-z4btn" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.849786 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-scripts\") pod \"nova-cell0-cell-mapping-z4btn\" (UID: \"5c88cb8b-6880-4be0-a4bc-467783f0f752\") " pod="openstack/nova-cell0-cell-mapping-z4btn" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.849860 4644 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-config-data\") pod \"nova-api-0\" (UID: \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\") " pod="openstack/nova-api-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.849924 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-z4btn\" (UID: \"5c88cb8b-6880-4be0-a4bc-467783f0f752\") " pod="openstack/nova-cell0-cell-mapping-z4btn" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.849957 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.857554 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-z4btn\" (UID: \"5c88cb8b-6880-4be0-a4bc-467783f0f752\") " pod="openstack/nova-cell0-cell-mapping-z4btn" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.858937 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-config-data\") pod \"nova-cell0-cell-mapping-z4btn\" (UID: \"5c88cb8b-6880-4be0-a4bc-467783f0f752\") " pod="openstack/nova-cell0-cell-mapping-z4btn" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.859817 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-scripts\") pod \"nova-cell0-cell-mapping-z4btn\" (UID: \"5c88cb8b-6880-4be0-a4bc-467783f0f752\") " pod="openstack/nova-cell0-cell-mapping-z4btn" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.884328 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dnn4\" (UniqueName: \"kubernetes.io/projected/5c88cb8b-6880-4be0-a4bc-467783f0f752-kube-api-access-4dnn4\") pod \"nova-cell0-cell-mapping-z4btn\" (UID: \"5c88cb8b-6880-4be0-a4bc-467783f0f752\") " pod="openstack/nova-cell0-cell-mapping-z4btn" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.948864 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-z4btn" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.951378 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-config-data\") pod \"nova-api-0\" (UID: \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\") " pod="openstack/nova-api-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.951467 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.951515 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-logs\") pod \"nova-api-0\" (UID: \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\") " pod="openstack/nova-api-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.951570 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\") " pod="openstack/nova-api-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.951603 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcj9n\" (UniqueName: \"kubernetes.io/projected/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-kube-api-access-tcj9n\") pod \"nova-scheduler-0\" (UID: \"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.951624 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-config-data\") pod \"nova-scheduler-0\" (UID: \"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.951656 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pztpd\" (UniqueName: \"kubernetes.io/projected/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-kube-api-access-pztpd\") pod \"nova-api-0\" (UID: \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\") " pod="openstack/nova-api-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.957197 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-config-data\") pod \"nova-scheduler-0\" (UID: \"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.958251 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-logs\") pod \"nova-api-0\" (UID: \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\") " pod="openstack/nova-api-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.958875 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-config-data\") pod \"nova-api-0\" (UID: 
\"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\") " pod="openstack/nova-api-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.968976 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.973256 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\") " pod="openstack/nova-api-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.978193 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pztpd\" (UniqueName: \"kubernetes.io/projected/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-kube-api-access-pztpd\") pod \"nova-api-0\" (UID: \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\") " pod="openstack/nova-api-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.978651 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcj9n\" (UniqueName: \"kubernetes.io/projected/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-kube-api-access-tcj9n\") pod \"nova-scheduler-0\" (UID: \"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:21 crc kubenswrapper[4644]: I1213 07:03:21.998787 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.010027 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.017127 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.018398 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.033943 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.043978 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.050795 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.056623 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68h27\" (UniqueName: \"kubernetes.io/projected/4129e894-c37a-4cee-a69a-73c8eb58710e-kube-api-access-68h27\") pod \"nova-metadata-0\" (UID: \"4129e894-c37a-4cee-a69a-73c8eb58710e\") " pod="openstack/nova-metadata-0" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.056754 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4129e894-c37a-4cee-a69a-73c8eb58710e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4129e894-c37a-4cee-a69a-73c8eb58710e\") " pod="openstack/nova-metadata-0" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.056904 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwd4d\" (UniqueName: \"kubernetes.io/projected/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-kube-api-access-bwd4d\") pod \"nova-cell1-novncproxy-0\" (UID: \"85c349ef-fdeb-44d2-8cf2-a55533af4d5b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.057203 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4129e894-c37a-4cee-a69a-73c8eb58710e-logs\") pod \"nova-metadata-0\" (UID: \"4129e894-c37a-4cee-a69a-73c8eb58710e\") " pod="openstack/nova-metadata-0" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.057252 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"85c349ef-fdeb-44d2-8cf2-a55533af4d5b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.057313 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4129e894-c37a-4cee-a69a-73c8eb58710e-config-data\") pod \"nova-metadata-0\" (UID: \"4129e894-c37a-4cee-a69a-73c8eb58710e\") " pod="openstack/nova-metadata-0" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.057335 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"85c349ef-fdeb-44d2-8cf2-a55533af4d5b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.073274 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.092530 4644 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/nova-api-0" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.123467 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8d97cbc7-mf4xx"] Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.125145 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.132424 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.138869 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8d97cbc7-mf4xx"] Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.158897 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68h27\" (UniqueName: \"kubernetes.io/projected/4129e894-c37a-4cee-a69a-73c8eb58710e-kube-api-access-68h27\") pod \"nova-metadata-0\" (UID: \"4129e894-c37a-4cee-a69a-73c8eb58710e\") " pod="openstack/nova-metadata-0" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.159181 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-config\") pod \"dnsmasq-dns-8d97cbc7-mf4xx\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.159218 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-ovsdbserver-nb\") pod \"dnsmasq-dns-8d97cbc7-mf4xx\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.159275 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4129e894-c37a-4cee-a69a-73c8eb58710e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4129e894-c37a-4cee-a69a-73c8eb58710e\") " pod="openstack/nova-metadata-0" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.159299 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlcpm\" (UniqueName: \"kubernetes.io/projected/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-kube-api-access-vlcpm\") pod \"dnsmasq-dns-8d97cbc7-mf4xx\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.159328 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-dns-svc\") pod \"dnsmasq-dns-8d97cbc7-mf4xx\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.159406 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwd4d\" (UniqueName: \"kubernetes.io/projected/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-kube-api-access-bwd4d\") pod \"nova-cell1-novncproxy-0\" (UID: \"85c349ef-fdeb-44d2-8cf2-a55533af4d5b\") " pod="openstack/nova-cell1-novncproxy-0" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.159568 4644 
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.159568 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4129e894-c37a-4cee-a69a-73c8eb58710e-logs\") pod \"nova-metadata-0\" (UID: \"4129e894-c37a-4cee-a69a-73c8eb58710e\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.159598 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"85c349ef-fdeb-44d2-8cf2-a55533af4d5b\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.159635 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-ovsdbserver-sb\") pod \"dnsmasq-dns-8d97cbc7-mf4xx\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.159650 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4129e894-c37a-4cee-a69a-73c8eb58710e-config-data\") pod \"nova-metadata-0\" (UID: \"4129e894-c37a-4cee-a69a-73c8eb58710e\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.159668 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"85c349ef-fdeb-44d2-8cf2-a55533af4d5b\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.163463 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4129e894-c37a-4cee-a69a-73c8eb58710e-logs\") pod \"nova-metadata-0\" (UID: \"4129e894-c37a-4cee-a69a-73c8eb58710e\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.167190 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4129e894-c37a-4cee-a69a-73c8eb58710e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4129e894-c37a-4cee-a69a-73c8eb58710e\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.167211 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4129e894-c37a-4cee-a69a-73c8eb58710e-config-data\") pod \"nova-metadata-0\" (UID: \"4129e894-c37a-4cee-a69a-73c8eb58710e\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.168028 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"85c349ef-fdeb-44d2-8cf2-a55533af4d5b\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.168246 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"85c349ef-fdeb-44d2-8cf2-a55533af4d5b\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.179165 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwd4d\" (UniqueName: \"kubernetes.io/projected/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-kube-api-access-bwd4d\") pod \"nova-cell1-novncproxy-0\" (UID: \"85c349ef-fdeb-44d2-8cf2-a55533af4d5b\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.182950 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68h27\" (UniqueName: \"kubernetes.io/projected/4129e894-c37a-4cee-a69a-73c8eb58710e-kube-api-access-68h27\") pod \"nova-metadata-0\" (UID: \"4129e894-c37a-4cee-a69a-73c8eb58710e\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.221840 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.262696 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-dns-svc\") pod \"dnsmasq-dns-8d97cbc7-mf4xx\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.265131 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-ovsdbserver-sb\") pod \"dnsmasq-dns-8d97cbc7-mf4xx\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.265328 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-config\") pod \"dnsmasq-dns-8d97cbc7-mf4xx\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.265353 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-ovsdbserver-nb\") pod \"dnsmasq-dns-8d97cbc7-mf4xx\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.265512 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlcpm\" (UniqueName: \"kubernetes.io/projected/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-kube-api-access-vlcpm\") pod \"dnsmasq-dns-8d97cbc7-mf4xx\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.267008 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-dns-svc\") pod \"dnsmasq-dns-8d97cbc7-mf4xx\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.268682 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-ovsdbserver-sb\") pod \"dnsmasq-dns-8d97cbc7-mf4xx\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.270748 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-ovsdbserver-nb\") pod \"dnsmasq-dns-8d97cbc7-mf4xx\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.271506 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-config\") pod \"dnsmasq-dns-8d97cbc7-mf4xx\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.289485 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlcpm\" (UniqueName: \"kubernetes.io/projected/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-kube-api-access-vlcpm\") pod \"dnsmasq-dns-8d97cbc7-mf4xx\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.482387 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.523974 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx"
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.546142 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-z4btn"]
Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.667556 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nq9b5"]
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-nq9b5" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.678974 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.679970 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.685362 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nq9b5"] Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.692377 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 07:03:22 crc kubenswrapper[4644]: W1213 07:03:22.694115 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ebf88d3_b1ff_43eb_8f15_e1f6b3a7700a.slice/crio-0e51e6a65e627e121886da88e4291bdd410d834a97dc47644643546e0caa6c88 WatchSource:0}: Error finding container 0e51e6a65e627e121886da88e4291bdd410d834a97dc47644643546e0caa6c88: Status 404 returned error can't find the container with id 0e51e6a65e627e121886da88e4291bdd410d834a97dc47644643546e0caa6c88 Dec 13 07:03:22 crc kubenswrapper[4644]: W1213 07:03:22.698686 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ec2e9bb_7bab_412c_8a1c_96b0e5a4d4bb.slice/crio-dbc72e6638e60065356da29152d807c5ad1e36938ebaa799b20e574e130efd9d WatchSource:0}: Error finding container dbc72e6638e60065356da29152d807c5ad1e36938ebaa799b20e574e130efd9d: Status 404 returned error can't find the container with id dbc72e6638e60065356da29152d807c5ad1e36938ebaa799b20e574e130efd9d Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.705856 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.781675 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-nq9b5\" (UID: \"d850ca3e-58f1-4035-80b5-1799182101dc\") " pod="openstack/nova-cell1-conductor-db-sync-nq9b5" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.781929 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw2h8\" (UniqueName: \"kubernetes.io/projected/d850ca3e-58f1-4035-80b5-1799182101dc-kube-api-access-qw2h8\") pod \"nova-cell1-conductor-db-sync-nq9b5\" (UID: \"d850ca3e-58f1-4035-80b5-1799182101dc\") " pod="openstack/nova-cell1-conductor-db-sync-nq9b5" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.782137 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-scripts\") pod \"nova-cell1-conductor-db-sync-nq9b5\" (UID: \"d850ca3e-58f1-4035-80b5-1799182101dc\") " pod="openstack/nova-cell1-conductor-db-sync-nq9b5" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.782206 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-config-data\") pod \"nova-cell1-conductor-db-sync-nq9b5\" (UID: 
\"d850ca3e-58f1-4035-80b5-1799182101dc\") " pod="openstack/nova-cell1-conductor-db-sync-nq9b5" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.815621 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 13 07:03:22 crc kubenswrapper[4644]: W1213 07:03:22.819607 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85c349ef_fdeb_44d2_8cf2_a55533af4d5b.slice/crio-f4c6aa4e396a9a1dd98091637838546b8b3e6bf83cf7a2cf0073f03a12074c11 WatchSource:0}: Error finding container f4c6aa4e396a9a1dd98091637838546b8b3e6bf83cf7a2cf0073f03a12074c11: Status 404 returned error can't find the container with id f4c6aa4e396a9a1dd98091637838546b8b3e6bf83cf7a2cf0073f03a12074c11 Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.829929 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-z4btn" event={"ID":"5c88cb8b-6880-4be0-a4bc-467783f0f752","Type":"ContainerStarted","Data":"96e2ba3e1b31d09d264c0684115656ca62ff25162b36332a850a72184ebf8bea"} Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.830015 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-z4btn" event={"ID":"5c88cb8b-6880-4be0-a4bc-467783f0f752","Type":"ContainerStarted","Data":"5cd283418d4140cc89e9149e3c8cf6b164cf0ce74830aa70cb11e085e28a0ba3"} Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.831534 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"85c349ef-fdeb-44d2-8cf2-a55533af4d5b","Type":"ContainerStarted","Data":"f4c6aa4e396a9a1dd98091637838546b8b3e6bf83cf7a2cf0073f03a12074c11"} Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.833323 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb","Type":"ContainerStarted","Data":"dbc72e6638e60065356da29152d807c5ad1e36938ebaa799b20e574e130efd9d"} Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.834651 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a","Type":"ContainerStarted","Data":"0e51e6a65e627e121886da88e4291bdd410d834a97dc47644643546e0caa6c88"} Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.851928 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-z4btn" podStartSLOduration=1.8519075489999999 podStartE2EDuration="1.851907549s" podCreationTimestamp="2025-12-13 07:03:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:03:22.847398659 +0000 UTC m=+1065.062349493" watchObservedRunningTime="2025-12-13 07:03:22.851907549 +0000 UTC m=+1065.066858382" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.883984 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-nq9b5\" (UID: \"d850ca3e-58f1-4035-80b5-1799182101dc\") " pod="openstack/nova-cell1-conductor-db-sync-nq9b5" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.884088 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw2h8\" (UniqueName: 
\"kubernetes.io/projected/d850ca3e-58f1-4035-80b5-1799182101dc-kube-api-access-qw2h8\") pod \"nova-cell1-conductor-db-sync-nq9b5\" (UID: \"d850ca3e-58f1-4035-80b5-1799182101dc\") " pod="openstack/nova-cell1-conductor-db-sync-nq9b5" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.884202 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-scripts\") pod \"nova-cell1-conductor-db-sync-nq9b5\" (UID: \"d850ca3e-58f1-4035-80b5-1799182101dc\") " pod="openstack/nova-cell1-conductor-db-sync-nq9b5" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.884247 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-config-data\") pod \"nova-cell1-conductor-db-sync-nq9b5\" (UID: \"d850ca3e-58f1-4035-80b5-1799182101dc\") " pod="openstack/nova-cell1-conductor-db-sync-nq9b5" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.891816 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-nq9b5\" (UID: \"d850ca3e-58f1-4035-80b5-1799182101dc\") " pod="openstack/nova-cell1-conductor-db-sync-nq9b5" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.892933 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-scripts\") pod \"nova-cell1-conductor-db-sync-nq9b5\" (UID: \"d850ca3e-58f1-4035-80b5-1799182101dc\") " pod="openstack/nova-cell1-conductor-db-sync-nq9b5" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.896990 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-config-data\") pod \"nova-cell1-conductor-db-sync-nq9b5\" (UID: \"d850ca3e-58f1-4035-80b5-1799182101dc\") " pod="openstack/nova-cell1-conductor-db-sync-nq9b5" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.906348 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw2h8\" (UniqueName: \"kubernetes.io/projected/d850ca3e-58f1-4035-80b5-1799182101dc-kube-api-access-qw2h8\") pod \"nova-cell1-conductor-db-sync-nq9b5\" (UID: \"d850ca3e-58f1-4035-80b5-1799182101dc\") " pod="openstack/nova-cell1-conductor-db-sync-nq9b5" Dec 13 07:03:22 crc kubenswrapper[4644]: I1213 07:03:22.988897 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-nq9b5" Dec 13 07:03:23 crc kubenswrapper[4644]: I1213 07:03:23.016044 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 07:03:23 crc kubenswrapper[4644]: W1213 07:03:23.022645 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4129e894_c37a_4cee_a69a_73c8eb58710e.slice/crio-2cf8b79c9c14b1d50d53fa5f6242d984d8e5f5f2a316541b64984e529208e70c WatchSource:0}: Error finding container 2cf8b79c9c14b1d50d53fa5f6242d984d8e5f5f2a316541b64984e529208e70c: Status 404 returned error can't find the container with id 2cf8b79c9c14b1d50d53fa5f6242d984d8e5f5f2a316541b64984e529208e70c Dec 13 07:03:23 crc kubenswrapper[4644]: I1213 07:03:23.091012 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8d97cbc7-mf4xx"] Dec 13 07:03:23 crc kubenswrapper[4644]: I1213 07:03:23.416726 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nq9b5"] Dec 13 07:03:23 crc kubenswrapper[4644]: I1213 07:03:23.846103 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4129e894-c37a-4cee-a69a-73c8eb58710e","Type":"ContainerStarted","Data":"2cf8b79c9c14b1d50d53fa5f6242d984d8e5f5f2a316541b64984e529208e70c"} Dec 13 07:03:23 crc kubenswrapper[4644]: I1213 07:03:23.848024 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-nq9b5" event={"ID":"d850ca3e-58f1-4035-80b5-1799182101dc","Type":"ContainerStarted","Data":"54e5eac74868b80de3e3c799568b678cb8539c7c649b20365117a19da7bed4b3"} Dec 13 07:03:23 crc kubenswrapper[4644]: I1213 07:03:23.848084 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-nq9b5" event={"ID":"d850ca3e-58f1-4035-80b5-1799182101dc","Type":"ContainerStarted","Data":"0a98698db70d04ec9c6aad8457e1be269827e0a1d393499d6ca3badcc1b0251e"} Dec 13 07:03:23 crc kubenswrapper[4644]: I1213 07:03:23.850417 4644 generic.go:334] "Generic (PLEG): container finished" podID="24878fb4-24ad-4fa3-ad74-edf2b9b64e3c" containerID="f10664d9e74d1a420e0ed1793cd65d600b33e0f0b64df2c03cbf2a234d09b836" exitCode=0 Dec 13 07:03:23 crc kubenswrapper[4644]: I1213 07:03:23.850484 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx" event={"ID":"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c","Type":"ContainerDied","Data":"f10664d9e74d1a420e0ed1793cd65d600b33e0f0b64df2c03cbf2a234d09b836"} Dec 13 07:03:23 crc kubenswrapper[4644]: I1213 07:03:23.850524 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx" event={"ID":"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c","Type":"ContainerStarted","Data":"baa2d8a8001baed09f5e61908b626a4db4e1a4a6f7e015fa4f069de891924ba3"} Dec 13 07:03:23 crc kubenswrapper[4644]: I1213 07:03:23.876409 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-nq9b5" podStartSLOduration=1.876389308 podStartE2EDuration="1.876389308s" podCreationTimestamp="2025-12-13 07:03:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:03:23.870981999 +0000 UTC m=+1066.085932863" watchObservedRunningTime="2025-12-13 07:03:23.876389308 +0000 UTC m=+1066.091340140" Dec 13 07:03:24 crc kubenswrapper[4644]: I1213 
Dec 13 07:03:24 crc kubenswrapper[4644]: I1213 07:03:24.877281 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx" event={"ID":"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c","Type":"ContainerStarted","Data":"03eecbc4efe35048316411a12731d8b644c9395a41db9192287ba1674bdc81e1"}
Dec 13 07:03:24 crc kubenswrapper[4644]: I1213 07:03:24.878114 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx"
Dec 13 07:03:24 crc kubenswrapper[4644]: I1213 07:03:24.883498 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a","Type":"ContainerStarted","Data":"626ccf5e46bff9c8eba4dca8206bdc0354230d67000e83801d986195f44ee258"}
Dec 13 07:03:24 crc kubenswrapper[4644]: I1213 07:03:24.901524 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx" podStartSLOduration=2.901504978 podStartE2EDuration="2.901504978s" podCreationTimestamp="2025-12-13 07:03:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:03:24.892642603 +0000 UTC m=+1067.107593436" watchObservedRunningTime="2025-12-13 07:03:24.901504978 +0000 UTC m=+1067.116455811"
Dec 13 07:03:24 crc kubenswrapper[4644]: I1213 07:03:24.911363 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.016371574 podStartE2EDuration="3.911340253s" podCreationTimestamp="2025-12-13 07:03:21 +0000 UTC" firstStartedPulling="2025-12-13 07:03:22.698344893 +0000 UTC m=+1064.913295726" lastFinishedPulling="2025-12-13 07:03:24.593313572 +0000 UTC m=+1066.808264405" observedRunningTime="2025-12-13 07:03:24.908037241 +0000 UTC m=+1067.122988074" watchObservedRunningTime="2025-12-13 07:03:24.911340253 +0000 UTC m=+1067.126291086"
Dec 13 07:03:25 crc kubenswrapper[4644]: I1213 07:03:25.484840 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 13 07:03:25 crc kubenswrapper[4644]: I1213 07:03:25.492119 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 13 07:03:26 crc kubenswrapper[4644]: I1213 07:03:26.900948 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"85c349ef-fdeb-44d2-8cf2-a55533af4d5b","Type":"ContainerStarted","Data":"7dc0b92a87eea140d607390f4036ec01210b56a657743b3c6c2f4814011cf1f0"}
Dec 13 07:03:26 crc kubenswrapper[4644]: I1213 07:03:26.900988 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="85c349ef-fdeb-44d2-8cf2-a55533af4d5b" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://7dc0b92a87eea140d607390f4036ec01210b56a657743b3c6c2f4814011cf1f0" gracePeriod=30
Dec 13 07:03:26 crc kubenswrapper[4644]: I1213 07:03:26.904010 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb","Type":"ContainerStarted","Data":"295d57168ab6f6432bcfb9b46df702524638954834303a2fb29caac70c7db024"}
Dec 13 07:03:26 crc kubenswrapper[4644]: I1213 07:03:26.904060 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb","Type":"ContainerStarted","Data":"d1ea3af4751ad1645ef4b5c3bda75e3089f3ecffbc62a4768ce1a60e4d4960fb"}
Dec 13 07:03:26 crc kubenswrapper[4644]: I1213 07:03:26.906599 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4129e894-c37a-4cee-a69a-73c8eb58710e","Type":"ContainerStarted","Data":"dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472"}
Dec 13 07:03:26 crc kubenswrapper[4644]: I1213 07:03:26.906655 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4129e894-c37a-4cee-a69a-73c8eb58710e","Type":"ContainerStarted","Data":"54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635"}
Dec 13 07:03:26 crc kubenswrapper[4644]: I1213 07:03:26.906679 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4129e894-c37a-4cee-a69a-73c8eb58710e" containerName="nova-metadata-log" containerID="cri-o://54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635" gracePeriod=30
Dec 13 07:03:26 crc kubenswrapper[4644]: I1213 07:03:26.906693 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4129e894-c37a-4cee-a69a-73c8eb58710e" containerName="nova-metadata-metadata" containerID="cri-o://dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472" gracePeriod=30
Dec 13 07:03:26 crc kubenswrapper[4644]: I1213 07:03:26.923396 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.9490398410000003 podStartE2EDuration="5.923380688s" podCreationTimestamp="2025-12-13 07:03:21 +0000 UTC" firstStartedPulling="2025-12-13 07:03:22.821475403 +0000 UTC m=+1065.036426235" lastFinishedPulling="2025-12-13 07:03:25.795816249 +0000 UTC m=+1068.010767082" observedRunningTime="2025-12-13 07:03:26.915428142 +0000 UTC m=+1069.130378976" watchObservedRunningTime="2025-12-13 07:03:26.923380688 +0000 UTC m=+1069.138331521"
Dec 13 07:03:26 crc kubenswrapper[4644]: I1213 07:03:26.938245 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.861823396 podStartE2EDuration="5.938228227s" podCreationTimestamp="2025-12-13 07:03:21 +0000 UTC" firstStartedPulling="2025-12-13 07:03:22.720639788 +0000 UTC m=+1064.935590620" lastFinishedPulling="2025-12-13 07:03:25.797044618 +0000 UTC m=+1068.011995451" observedRunningTime="2025-12-13 07:03:26.934146462 +0000 UTC m=+1069.149097295" watchObservedRunningTime="2025-12-13 07:03:26.938228227 +0000 UTC m=+1069.153179060"
Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.133667 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.222320 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.483098 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.483361 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
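
The "Killing container with a grace period" records above (gracePeriod=30) describe the standard two-phase stop: the runtime delivers SIGTERM immediately and escalates to SIGKILL only if the container is still alive after the grace period. A hedged analogy in plain Python (not the kubelet's actual implementation):

    import signal
    import subprocess
    import time

    def stop_with_grace(proc: subprocess.Popen, grace_period: float = 30.0) -> int:
        proc.send_signal(signal.SIGTERM)      # polite shutdown request
        deadline = time.monotonic() + grace_period
        while time.monotonic() < deadline:
            if proc.poll() is not None:
                return proc.returncode        # exited within the grace period
            time.sleep(0.1)
        proc.kill()                           # escalate to SIGKILL
        return proc.wait()

Python reports a signal death as a negative returncode (-15); container runtimes use the shell convention 128+15=143 instead, which is exactly the exitCode=143 logged for nova-metadata-log just below.
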
Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.613279 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4129e894-c37a-4cee-a69a-73c8eb58710e-logs\") pod \"4129e894-c37a-4cee-a69a-73c8eb58710e\" (UID: \"4129e894-c37a-4cee-a69a-73c8eb58710e\") " Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.613390 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4129e894-c37a-4cee-a69a-73c8eb58710e-combined-ca-bundle\") pod \"4129e894-c37a-4cee-a69a-73c8eb58710e\" (UID: \"4129e894-c37a-4cee-a69a-73c8eb58710e\") " Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.613515 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4129e894-c37a-4cee-a69a-73c8eb58710e-config-data\") pod \"4129e894-c37a-4cee-a69a-73c8eb58710e\" (UID: \"4129e894-c37a-4cee-a69a-73c8eb58710e\") " Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.613679 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68h27\" (UniqueName: \"kubernetes.io/projected/4129e894-c37a-4cee-a69a-73c8eb58710e-kube-api-access-68h27\") pod \"4129e894-c37a-4cee-a69a-73c8eb58710e\" (UID: \"4129e894-c37a-4cee-a69a-73c8eb58710e\") " Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.614373 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4129e894-c37a-4cee-a69a-73c8eb58710e-logs" (OuterVolumeSpecName: "logs") pod "4129e894-c37a-4cee-a69a-73c8eb58710e" (UID: "4129e894-c37a-4cee-a69a-73c8eb58710e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.622916 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4129e894-c37a-4cee-a69a-73c8eb58710e-kube-api-access-68h27" (OuterVolumeSpecName: "kube-api-access-68h27") pod "4129e894-c37a-4cee-a69a-73c8eb58710e" (UID: "4129e894-c37a-4cee-a69a-73c8eb58710e"). InnerVolumeSpecName "kube-api-access-68h27". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:03:27 crc kubenswrapper[4644]: E1213 07:03:27.632781 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4129e894-c37a-4cee-a69a-73c8eb58710e-config-data podName:4129e894-c37a-4cee-a69a-73c8eb58710e nodeName:}" failed. No retries permitted until 2025-12-13 07:03:28.132759092 +0000 UTC m=+1070.347709925 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/4129e894-c37a-4cee-a69a-73c8eb58710e-config-data") pod "4129e894-c37a-4cee-a69a-73c8eb58710e" (UID: "4129e894-c37a-4cee-a69a-73c8eb58710e") : error deleting /var/lib/kubelet/pods/4129e894-c37a-4cee-a69a-73c8eb58710e/volume-subpaths: remove /var/lib/kubelet/pods/4129e894-c37a-4cee-a69a-73c8eb58710e/volume-subpaths: no such file or directory Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.635214 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4129e894-c37a-4cee-a69a-73c8eb58710e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4129e894-c37a-4cee-a69a-73c8eb58710e" (UID: "4129e894-c37a-4cee-a69a-73c8eb58710e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.716103 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68h27\" (UniqueName: \"kubernetes.io/projected/4129e894-c37a-4cee-a69a-73c8eb58710e-kube-api-access-68h27\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.716138 4644 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4129e894-c37a-4cee-a69a-73c8eb58710e-logs\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.716149 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4129e894-c37a-4cee-a69a-73c8eb58710e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.916733 4644 generic.go:334] "Generic (PLEG): container finished" podID="4129e894-c37a-4cee-a69a-73c8eb58710e" containerID="dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472" exitCode=0 Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.916765 4644 generic.go:334] "Generic (PLEG): container finished" podID="4129e894-c37a-4cee-a69a-73c8eb58710e" containerID="54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635" exitCode=143 Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.916773 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.916830 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4129e894-c37a-4cee-a69a-73c8eb58710e","Type":"ContainerDied","Data":"dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472"} Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.916908 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4129e894-c37a-4cee-a69a-73c8eb58710e","Type":"ContainerDied","Data":"54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635"} Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.916938 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4129e894-c37a-4cee-a69a-73c8eb58710e","Type":"ContainerDied","Data":"2cf8b79c9c14b1d50d53fa5f6242d984d8e5f5f2a316541b64984e529208e70c"} Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.916976 4644 scope.go:117] "RemoveContainer" containerID="dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472" Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.918338 4644 generic.go:334] "Generic (PLEG): container finished" podID="d850ca3e-58f1-4035-80b5-1799182101dc" containerID="54e5eac74868b80de3e3c799568b678cb8539c7c649b20365117a19da7bed4b3" exitCode=0 Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.918403 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-nq9b5" event={"ID":"d850ca3e-58f1-4035-80b5-1799182101dc","Type":"ContainerDied","Data":"54e5eac74868b80de3e3c799568b678cb8539c7c649b20365117a19da7bed4b3"} Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.965645 4644 scope.go:117] "RemoveContainer" containerID="54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635" Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.989058 4644 scope.go:117] "RemoveContainer" containerID="dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472" 
Dec 13 07:03:27 crc kubenswrapper[4644]: E1213 07:03:27.989483 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472\": container with ID starting with dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472 not found: ID does not exist" containerID="dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472"
Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.989559 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472"} err="failed to get container status \"dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472\": rpc error: code = NotFound desc = could not find container \"dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472\": container with ID starting with dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472 not found: ID does not exist"
Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.989610 4644 scope.go:117] "RemoveContainer" containerID="54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635"
Dec 13 07:03:27 crc kubenswrapper[4644]: E1213 07:03:27.990045 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635\": container with ID starting with 54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635 not found: ID does not exist" containerID="54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635"
Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.990085 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635"} err="failed to get container status \"54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635\": rpc error: code = NotFound desc = could not find container \"54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635\": container with ID starting with 54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635 not found: ID does not exist"
Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.990111 4644 scope.go:117] "RemoveContainer" containerID="dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472"
Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.990486 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472"} err="failed to get container status \"dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472\": rpc error: code = NotFound desc = could not find container \"dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472\": container with ID starting with dc1f2172784171f2e628adcc2f69d8e67c5ff9620fb13219898b00bcae6c3472 not found: ID does not exist"
Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.990515 4644 scope.go:117] "RemoveContainer" containerID="54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635"
Dec 13 07:03:27 crc kubenswrapper[4644]: I1213 07:03:27.990826 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635"} err="failed to get container status \"54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635\": rpc error: code = NotFound desc = could not find container \"54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635\": container with ID starting with 54eeeaa7806005d22c5f443735d3deaa637e2c354e533def54c060cb74a5f635 not found: ID does not exist"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.225101 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4129e894-c37a-4cee-a69a-73c8eb58710e-config-data\") pod \"4129e894-c37a-4cee-a69a-73c8eb58710e\" (UID: \"4129e894-c37a-4cee-a69a-73c8eb58710e\") "
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.228066 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4129e894-c37a-4cee-a69a-73c8eb58710e-config-data" (OuterVolumeSpecName: "config-data") pod "4129e894-c37a-4cee-a69a-73c8eb58710e" (UID: "4129e894-c37a-4cee-a69a-73c8eb58710e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.326993 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4129e894-c37a-4cee-a69a-73c8eb58710e-config-data\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.538718 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.545771 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.555666 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 13 07:03:28 crc kubenswrapper[4644]: E1213 07:03:28.556049 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4129e894-c37a-4cee-a69a-73c8eb58710e" containerName="nova-metadata-log"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.556068 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4129e894-c37a-4cee-a69a-73c8eb58710e" containerName="nova-metadata-log"
Dec 13 07:03:28 crc kubenswrapper[4644]: E1213 07:03:28.556083 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4129e894-c37a-4cee-a69a-73c8eb58710e" containerName="nova-metadata-metadata"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.556089 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4129e894-c37a-4cee-a69a-73c8eb58710e" containerName="nova-metadata-metadata"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.556280 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="4129e894-c37a-4cee-a69a-73c8eb58710e" containerName="nova-metadata-metadata"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.556300 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="4129e894-c37a-4cee-a69a-73c8eb58710e" containerName="nova-metadata-log"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.557163 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.559028 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.559279 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.575463 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.633681 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.633768 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqtk2\" (UniqueName: \"kubernetes.io/projected/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-kube-api-access-dqtk2\") pod \"nova-metadata-0\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.633835 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.633863 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-logs\") pod \"nova-metadata-0\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.634027 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-config-data\") pod \"nova-metadata-0\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.735992 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-config-data\") pod \"nova-metadata-0\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.736091 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.736144 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqtk2\" (UniqueName: \"kubernetes.io/projected/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-kube-api-access-dqtk2\") pod \"nova-metadata-0\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.736183 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.736206 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-logs\") pod \"nova-metadata-0\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.736649 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-logs\") pod \"nova-metadata-0\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.740849 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.746573 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.746728 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-config-data\") pod \"nova-metadata-0\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.762346 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqtk2\" (UniqueName: \"kubernetes.io/projected/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-kube-api-access-dqtk2\") pod \"nova-metadata-0\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") " pod="openstack/nova-metadata-0"
Dec 13 07:03:28 crc kubenswrapper[4644]: I1213 07:03:28.877363 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.289192 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-nq9b5"
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.290629 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 13 07:03:29 crc kubenswrapper[4644]: W1213 07:03:29.293624 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10e5fcd5_832b_4890_80c2_2cf4d7fafe34.slice/crio-851a1b8ed9be1339398e67df3532a6ee8ed5432038d2a99809abce878a039d4e WatchSource:0}: Error finding container 851a1b8ed9be1339398e67df3532a6ee8ed5432038d2a99809abce878a039d4e: Status 404 returned error can't find the container with id 851a1b8ed9be1339398e67df3532a6ee8ed5432038d2a99809abce878a039d4e
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.345724 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-combined-ca-bundle\") pod \"d850ca3e-58f1-4035-80b5-1799182101dc\" (UID: \"d850ca3e-58f1-4035-80b5-1799182101dc\") "
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.345961 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-scripts\") pod \"d850ca3e-58f1-4035-80b5-1799182101dc\" (UID: \"d850ca3e-58f1-4035-80b5-1799182101dc\") "
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.345994 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-config-data\") pod \"d850ca3e-58f1-4035-80b5-1799182101dc\" (UID: \"d850ca3e-58f1-4035-80b5-1799182101dc\") "
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.346026 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qw2h8\" (UniqueName: \"kubernetes.io/projected/d850ca3e-58f1-4035-80b5-1799182101dc-kube-api-access-qw2h8\") pod \"d850ca3e-58f1-4035-80b5-1799182101dc\" (UID: \"d850ca3e-58f1-4035-80b5-1799182101dc\") "
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.352221 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d850ca3e-58f1-4035-80b5-1799182101dc-kube-api-access-qw2h8" (OuterVolumeSpecName: "kube-api-access-qw2h8") pod "d850ca3e-58f1-4035-80b5-1799182101dc" (UID: "d850ca3e-58f1-4035-80b5-1799182101dc"). InnerVolumeSpecName "kube-api-access-qw2h8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.352338 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-scripts" (OuterVolumeSpecName: "scripts") pod "d850ca3e-58f1-4035-80b5-1799182101dc" (UID: "d850ca3e-58f1-4035-80b5-1799182101dc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.368173 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d850ca3e-58f1-4035-80b5-1799182101dc" (UID: "d850ca3e-58f1-4035-80b5-1799182101dc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.369666 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-config-data" (OuterVolumeSpecName: "config-data") pod "d850ca3e-58f1-4035-80b5-1799182101dc" (UID: "d850ca3e-58f1-4035-80b5-1799182101dc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.448603 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-scripts\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.448905 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-config-data\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.448917 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qw2h8\" (UniqueName: \"kubernetes.io/projected/d850ca3e-58f1-4035-80b5-1799182101dc-kube-api-access-qw2h8\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.448928 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d850ca3e-58f1-4035-80b5-1799182101dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.941376 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-nq9b5" event={"ID":"d850ca3e-58f1-4035-80b5-1799182101dc","Type":"ContainerDied","Data":"0a98698db70d04ec9c6aad8457e1be269827e0a1d393499d6ca3badcc1b0251e"}
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.941407 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-nq9b5"
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.941430 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a98698db70d04ec9c6aad8457e1be269827e0a1d393499d6ca3badcc1b0251e"
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.943240 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"10e5fcd5-832b-4890-80c2-2cf4d7fafe34","Type":"ContainerStarted","Data":"0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56"}
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.943278 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"10e5fcd5-832b-4890-80c2-2cf4d7fafe34","Type":"ContainerStarted","Data":"75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906"}
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.943290 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"10e5fcd5-832b-4890-80c2-2cf4d7fafe34","Type":"ContainerStarted","Data":"851a1b8ed9be1339398e67df3532a6ee8ed5432038d2a99809abce878a039d4e"}
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.944713 4644 generic.go:334] "Generic (PLEG): container finished" podID="5c88cb8b-6880-4be0-a4bc-467783f0f752" containerID="96e2ba3e1b31d09d264c0684115656ca62ff25162b36332a850a72184ebf8bea" exitCode=0
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.944758 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-z4btn" event={"ID":"5c88cb8b-6880-4be0-a4bc-467783f0f752","Type":"ContainerDied","Data":"96e2ba3e1b31d09d264c0684115656ca62ff25162b36332a850a72184ebf8bea"}
Dec 13 07:03:29 crc kubenswrapper[4644]: I1213 07:03:29.974080 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.974063489 podStartE2EDuration="1.974063489s" podCreationTimestamp="2025-12-13 07:03:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:03:29.95889715 +0000 UTC m=+1072.173847982" watchObservedRunningTime="2025-12-13 07:03:29.974063489 +0000 UTC m=+1072.189014322"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.039053 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 13 07:03:30 crc kubenswrapper[4644]: E1213 07:03:30.039561 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d850ca3e-58f1-4035-80b5-1799182101dc" containerName="nova-cell1-conductor-db-sync"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.039583 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="d850ca3e-58f1-4035-80b5-1799182101dc" containerName="nova-cell1-conductor-db-sync"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.039821 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="d850ca3e-58f1-4035-80b5-1799182101dc" containerName="nova-cell1-conductor-db-sync"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.040561 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.043127 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.062944 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.162143 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzzbn\" (UniqueName: \"kubernetes.io/projected/d4364181-c18b-438a-8f4c-3b44b907d2ae-kube-api-access-mzzbn\") pod \"nova-cell1-conductor-0\" (UID: \"d4364181-c18b-438a-8f4c-3b44b907d2ae\") " pod="openstack/nova-cell1-conductor-0"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.162483 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4364181-c18b-438a-8f4c-3b44b907d2ae-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d4364181-c18b-438a-8f4c-3b44b907d2ae\") " pod="openstack/nova-cell1-conductor-0"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.162643 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4364181-c18b-438a-8f4c-3b44b907d2ae-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d4364181-c18b-438a-8f4c-3b44b907d2ae\") " pod="openstack/nova-cell1-conductor-0"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.265225 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4364181-c18b-438a-8f4c-3b44b907d2ae-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d4364181-c18b-438a-8f4c-3b44b907d2ae\") " pod="openstack/nova-cell1-conductor-0"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.266102 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4364181-c18b-438a-8f4c-3b44b907d2ae-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d4364181-c18b-438a-8f4c-3b44b907d2ae\") " pod="openstack/nova-cell1-conductor-0"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.266394 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzzbn\" (UniqueName: \"kubernetes.io/projected/d4364181-c18b-438a-8f4c-3b44b907d2ae-kube-api-access-mzzbn\") pod \"nova-cell1-conductor-0\" (UID: \"d4364181-c18b-438a-8f4c-3b44b907d2ae\") " pod="openstack/nova-cell1-conductor-0"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.273745 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4364181-c18b-438a-8f4c-3b44b907d2ae-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d4364181-c18b-438a-8f4c-3b44b907d2ae\") " pod="openstack/nova-cell1-conductor-0"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.273744 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4364181-c18b-438a-8f4c-3b44b907d2ae-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d4364181-c18b-438a-8f4c-3b44b907d2ae\") " pod="openstack/nova-cell1-conductor-0"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.283075 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzzbn\" (UniqueName: \"kubernetes.io/projected/d4364181-c18b-438a-8f4c-3b44b907d2ae-kube-api-access-mzzbn\") pod \"nova-cell1-conductor-0\" (UID: \"d4364181-c18b-438a-8f4c-3b44b907d2ae\") " pod="openstack/nova-cell1-conductor-0"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.365238 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.397643 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4129e894-c37a-4cee-a69a-73c8eb58710e" path="/var/lib/kubelet/pods/4129e894-c37a-4cee-a69a-73c8eb58710e/volumes"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.763922 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 13 07:03:30 crc kubenswrapper[4644]: W1213 07:03:30.768524 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4364181_c18b_438a_8f4c_3b44b907d2ae.slice/crio-11ad50edb3912252ce57978b48f1a0767788239caf58f1223e5fa7a8b00d5101 WatchSource:0}: Error finding container 11ad50edb3912252ce57978b48f1a0767788239caf58f1223e5fa7a8b00d5101: Status 404 returned error can't find the container with id 11ad50edb3912252ce57978b48f1a0767788239caf58f1223e5fa7a8b00d5101
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.959022 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d4364181-c18b-438a-8f4c-3b44b907d2ae","Type":"ContainerStarted","Data":"637d48df87b8b74f4ac01e38eb67a609ed4af09e917485511dde24033428336d"}
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.959077 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d4364181-c18b-438a-8f4c-3b44b907d2ae","Type":"ContainerStarted","Data":"11ad50edb3912252ce57978b48f1a0767788239caf58f1223e5fa7a8b00d5101"}
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.959724 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Dec 13 07:03:30 crc kubenswrapper[4644]: I1213 07:03:30.981179 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=0.981160726 podStartE2EDuration="981.160726ms" podCreationTimestamp="2025-12-13 07:03:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:03:30.972258036 +0000 UTC m=+1073.187208868" watchObservedRunningTime="2025-12-13 07:03:30.981160726 +0000 UTC m=+1073.196111560"
Dec 13 07:03:31 crc kubenswrapper[4644]: I1213 07:03:31.320701 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-z4btn"
Dec 13 07:03:31 crc kubenswrapper[4644]: I1213 07:03:31.386743 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dnn4\" (UniqueName: \"kubernetes.io/projected/5c88cb8b-6880-4be0-a4bc-467783f0f752-kube-api-access-4dnn4\") pod \"5c88cb8b-6880-4be0-a4bc-467783f0f752\" (UID: \"5c88cb8b-6880-4be0-a4bc-467783f0f752\") "
Dec 13 07:03:31 crc kubenswrapper[4644]: I1213 07:03:31.386854 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-config-data\") pod \"5c88cb8b-6880-4be0-a4bc-467783f0f752\" (UID: \"5c88cb8b-6880-4be0-a4bc-467783f0f752\") "
Dec 13 07:03:31 crc kubenswrapper[4644]: I1213 07:03:31.387025 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-scripts\") pod \"5c88cb8b-6880-4be0-a4bc-467783f0f752\" (UID: \"5c88cb8b-6880-4be0-a4bc-467783f0f752\") "
Dec 13 07:03:31 crc kubenswrapper[4644]: I1213 07:03:31.387054 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-combined-ca-bundle\") pod \"5c88cb8b-6880-4be0-a4bc-467783f0f752\" (UID: \"5c88cb8b-6880-4be0-a4bc-467783f0f752\") "
Dec 13 07:03:31 crc kubenswrapper[4644]: I1213 07:03:31.393822 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c88cb8b-6880-4be0-a4bc-467783f0f752-kube-api-access-4dnn4" (OuterVolumeSpecName: "kube-api-access-4dnn4") pod "5c88cb8b-6880-4be0-a4bc-467783f0f752" (UID: "5c88cb8b-6880-4be0-a4bc-467783f0f752"). InnerVolumeSpecName "kube-api-access-4dnn4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 13 07:03:31 crc kubenswrapper[4644]: I1213 07:03:31.403665 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-scripts" (OuterVolumeSpecName: "scripts") pod "5c88cb8b-6880-4be0-a4bc-467783f0f752" (UID: "5c88cb8b-6880-4be0-a4bc-467783f0f752"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:03:31 crc kubenswrapper[4644]: I1213 07:03:31.409679 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c88cb8b-6880-4be0-a4bc-467783f0f752" (UID: "5c88cb8b-6880-4be0-a4bc-467783f0f752"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:03:31 crc kubenswrapper[4644]: I1213 07:03:31.409969 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-config-data" (OuterVolumeSpecName: "config-data") pod "5c88cb8b-6880-4be0-a4bc-467783f0f752" (UID: "5c88cb8b-6880-4be0-a4bc-467783f0f752"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:03:31 crc kubenswrapper[4644]: I1213 07:03:31.489188 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-config-data\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:31 crc kubenswrapper[4644]: I1213 07:03:31.489221 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-scripts\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:31 crc kubenswrapper[4644]: I1213 07:03:31.489231 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c88cb8b-6880-4be0-a4bc-467783f0f752-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:31 crc kubenswrapper[4644]: I1213 07:03:31.489241 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dnn4\" (UniqueName: \"kubernetes.io/projected/5c88cb8b-6880-4be0-a4bc-467783f0f752-kube-api-access-4dnn4\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:31 crc kubenswrapper[4644]: I1213 07:03:31.973027 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-z4btn"
Dec 13 07:03:31 crc kubenswrapper[4644]: I1213 07:03:31.973029 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-z4btn" event={"ID":"5c88cb8b-6880-4be0-a4bc-467783f0f752","Type":"ContainerDied","Data":"5cd283418d4140cc89e9149e3c8cf6b164cf0ce74830aa70cb11e085e28a0ba3"}
Dec 13 07:03:31 crc kubenswrapper[4644]: I1213 07:03:31.973377 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5cd283418d4140cc89e9149e3c8cf6b164cf0ce74830aa70cb11e085e28a0ba3"
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.094835 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.094883 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.133367 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.164749 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.165485 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.172489 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.184842 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.185117 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="10e5fcd5-832b-4890-80c2-2cf4d7fafe34" containerName="nova-metadata-log" containerID="cri-o://75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906" gracePeriod=30
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.185311 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="10e5fcd5-832b-4890-80c2-2cf4d7fafe34" containerName="nova-metadata-metadata" containerID="cri-o://0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56" gracePeriod=30
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.525594 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx"
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.596694 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68c677b759-hjpvx"]
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.596924 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-68c677b759-hjpvx" podUID="69eb14f6-88c6-43e9-89e0-554944201a8e" containerName="dnsmasq-dns" containerID="cri-o://762cf4d78861a7f6a00567d2e978d8882a7352585c0759e2dbadb5783e5a8e49" gracePeriod=10
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.713796 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.818063 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-combined-ca-bundle\") pod \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") "
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.818306 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-nova-metadata-tls-certs\") pod \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") "
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.818332 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqtk2\" (UniqueName: \"kubernetes.io/projected/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-kube-api-access-dqtk2\") pod \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") "
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.818389 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-config-data\") pod \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") "
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.818424 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-logs\") pod \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\" (UID: \"10e5fcd5-832b-4890-80c2-2cf4d7fafe34\") "
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.819339 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-logs" (OuterVolumeSpecName: "logs") pod "10e5fcd5-832b-4890-80c2-2cf4d7fafe34" (UID: "10e5fcd5-832b-4890-80c2-2cf4d7fafe34"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.819618 4644 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-logs\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.827693 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-kube-api-access-dqtk2" (OuterVolumeSpecName: "kube-api-access-dqtk2") pod "10e5fcd5-832b-4890-80c2-2cf4d7fafe34" (UID: "10e5fcd5-832b-4890-80c2-2cf4d7fafe34"). InnerVolumeSpecName "kube-api-access-dqtk2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.851518 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-config-data" (OuterVolumeSpecName: "config-data") pod "10e5fcd5-832b-4890-80c2-2cf4d7fafe34" (UID: "10e5fcd5-832b-4890-80c2-2cf4d7fafe34"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.856030 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "10e5fcd5-832b-4890-80c2-2cf4d7fafe34" (UID: "10e5fcd5-832b-4890-80c2-2cf4d7fafe34"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.878865 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "10e5fcd5-832b-4890-80c2-2cf4d7fafe34" (UID: "10e5fcd5-832b-4890-80c2-2cf4d7fafe34"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.922046 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.922092 4644 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.922105 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqtk2\" (UniqueName: \"kubernetes.io/projected/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-kube-api-access-dqtk2\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.922114 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10e5fcd5-832b-4890-80c2-2cf4d7fafe34-config-data\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.983070 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68c677b759-hjpvx"
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.983953 4644 generic.go:334] "Generic (PLEG): container finished" podID="69eb14f6-88c6-43e9-89e0-554944201a8e" containerID="762cf4d78861a7f6a00567d2e978d8882a7352585c0759e2dbadb5783e5a8e49" exitCode=0
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.984020 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68c677b759-hjpvx" event={"ID":"69eb14f6-88c6-43e9-89e0-554944201a8e","Type":"ContainerDied","Data":"762cf4d78861a7f6a00567d2e978d8882a7352585c0759e2dbadb5783e5a8e49"}
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.984073 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68c677b759-hjpvx" event={"ID":"69eb14f6-88c6-43e9-89e0-554944201a8e","Type":"ContainerDied","Data":"0024bd96d4997e6dae36d74565a1f9a09f5b707bd20e789d68680f21206d7842"}
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.984097 4644 scope.go:117] "RemoveContainer" containerID="762cf4d78861a7f6a00567d2e978d8882a7352585c0759e2dbadb5783e5a8e49"
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.985941 4644 generic.go:334] "Generic (PLEG): container finished" podID="10e5fcd5-832b-4890-80c2-2cf4d7fafe34" containerID="0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56" exitCode=0
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.985960 4644 generic.go:334] "Generic (PLEG): container finished" podID="10e5fcd5-832b-4890-80c2-2cf4d7fafe34" containerID="75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906" exitCode=143
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.986148 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"10e5fcd5-832b-4890-80c2-2cf4d7fafe34","Type":"ContainerDied","Data":"0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56"}
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.986177 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"10e5fcd5-832b-4890-80c2-2cf4d7fafe34","Type":"ContainerDied","Data":"75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906"}
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.986189 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"10e5fcd5-832b-4890-80c2-2cf4d7fafe34","Type":"ContainerDied","Data":"851a1b8ed9be1339398e67df3532a6ee8ed5432038d2a99809abce878a039d4e"}
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.986248 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.986772 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb" containerName="nova-api-log" containerID="cri-o://d1ea3af4751ad1645ef4b5c3bda75e3089f3ecffbc62a4768ce1a60e4d4960fb" gracePeriod=30
Dec 13 07:03:32 crc kubenswrapper[4644]: I1213 07:03:32.986895 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb" containerName="nova-api-api" containerID="cri-o://295d57168ab6f6432bcfb9b46df702524638954834303a2fb29caac70c7db024" gracePeriod=30
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:32.992832 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.173:8774/\": EOF"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:32.992967 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.173:8774/\": EOF"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.007321 4644 scope.go:117] "RemoveContainer" containerID="3c0e7457ae06ed80b08125bc23a5d70c1fba51131f9e5b65a0300256774e9ab9"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.026981 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.045097 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.054932 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.072704 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 13 07:03:33 crc kubenswrapper[4644]: E1213 07:03:33.073126 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c88cb8b-6880-4be0-a4bc-467783f0f752" containerName="nova-manage"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.073146 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c88cb8b-6880-4be0-a4bc-467783f0f752" containerName="nova-manage"
Dec 13 07:03:33 crc kubenswrapper[4644]: E1213 07:03:33.073158 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10e5fcd5-832b-4890-80c2-2cf4d7fafe34" containerName="nova-metadata-log"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.073166 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="10e5fcd5-832b-4890-80c2-2cf4d7fafe34" containerName="nova-metadata-log"
Dec 13 07:03:33 crc kubenswrapper[4644]: E1213 07:03:33.073179 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69eb14f6-88c6-43e9-89e0-554944201a8e" containerName="dnsmasq-dns"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.073185 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="69eb14f6-88c6-43e9-89e0-554944201a8e" containerName="dnsmasq-dns"
Dec 13 07:03:33 crc kubenswrapper[4644]: E1213 07:03:33.073200 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10e5fcd5-832b-4890-80c2-2cf4d7fafe34" containerName="nova-metadata-metadata"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.073205 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="10e5fcd5-832b-4890-80c2-2cf4d7fafe34" containerName="nova-metadata-metadata"
Dec 13 07:03:33 crc kubenswrapper[4644]: E1213 07:03:33.073227 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69eb14f6-88c6-43e9-89e0-554944201a8e" containerName="init"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.073233 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="69eb14f6-88c6-43e9-89e0-554944201a8e" containerName="init"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.073493 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c88cb8b-6880-4be0-a4bc-467783f0f752" containerName="nova-manage"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.073505 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="10e5fcd5-832b-4890-80c2-2cf4d7fafe34" containerName="nova-metadata-metadata"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.073514 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="69eb14f6-88c6-43e9-89e0-554944201a8e" containerName="dnsmasq-dns"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.073545 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="10e5fcd5-832b-4890-80c2-2cf4d7fafe34" containerName="nova-metadata-log"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.074617 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.074640 4644 scope.go:117] "RemoveContainer" containerID="762cf4d78861a7f6a00567d2e978d8882a7352585c0759e2dbadb5783e5a8e49"
Dec 13 07:03:33 crc kubenswrapper[4644]: E1213 07:03:33.075199 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"762cf4d78861a7f6a00567d2e978d8882a7352585c0759e2dbadb5783e5a8e49\": container with ID starting with 762cf4d78861a7f6a00567d2e978d8882a7352585c0759e2dbadb5783e5a8e49 not found: ID does not exist" containerID="762cf4d78861a7f6a00567d2e978d8882a7352585c0759e2dbadb5783e5a8e49"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.075234 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"762cf4d78861a7f6a00567d2e978d8882a7352585c0759e2dbadb5783e5a8e49"} err="failed to get container status \"762cf4d78861a7f6a00567d2e978d8882a7352585c0759e2dbadb5783e5a8e49\": rpc error: code = NotFound desc = could not find container \"762cf4d78861a7f6a00567d2e978d8882a7352585c0759e2dbadb5783e5a8e49\": container with ID starting with 762cf4d78861a7f6a00567d2e978d8882a7352585c0759e2dbadb5783e5a8e49 not found: ID does not exist"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.075259 4644 scope.go:117] "RemoveContainer" containerID="3c0e7457ae06ed80b08125bc23a5d70c1fba51131f9e5b65a0300256774e9ab9"
Dec 13 07:03:33 crc kubenswrapper[4644]: E1213 07:03:33.075845 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c0e7457ae06ed80b08125bc23a5d70c1fba51131f9e5b65a0300256774e9ab9\": container with ID starting with 3c0e7457ae06ed80b08125bc23a5d70c1fba51131f9e5b65a0300256774e9ab9 not found: ID does not exist" containerID="3c0e7457ae06ed80b08125bc23a5d70c1fba51131f9e5b65a0300256774e9ab9"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.075890 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c0e7457ae06ed80b08125bc23a5d70c1fba51131f9e5b65a0300256774e9ab9"} err="failed to get container status \"3c0e7457ae06ed80b08125bc23a5d70c1fba51131f9e5b65a0300256774e9ab9\": rpc error: code = NotFound desc = could not find container \"3c0e7457ae06ed80b08125bc23a5d70c1fba51131f9e5b65a0300256774e9ab9\": container with ID starting with 3c0e7457ae06ed80b08125bc23a5d70c1fba51131f9e5b65a0300256774e9ab9 not found: ID does not exist"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.075980 4644 scope.go:117] "RemoveContainer" containerID="0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.077648 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.077862 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.100943 4644 scope.go:117] "RemoveContainer" containerID="75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.117576 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.125427 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-ovsdbserver-sb\") pod \"69eb14f6-88c6-43e9-89e0-554944201a8e\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") "
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.125532 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-ovsdbserver-nb\") pod \"69eb14f6-88c6-43e9-89e0-554944201a8e\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") "
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.125614 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-config\") pod \"69eb14f6-88c6-43e9-89e0-554944201a8e\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") "
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.125736 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2pll\" (UniqueName: \"kubernetes.io/projected/69eb14f6-88c6-43e9-89e0-554944201a8e-kube-api-access-m2pll\") pod \"69eb14f6-88c6-43e9-89e0-554944201a8e\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") "
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.125824 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-dns-svc\") pod \"69eb14f6-88c6-43e9-89e0-554944201a8e\" (UID: \"69eb14f6-88c6-43e9-89e0-554944201a8e\") "
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.130368 4644 scope.go:117] "RemoveContainer" containerID="0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56"
Dec 13 07:03:33 crc kubenswrapper[4644]: E1213 07:03:33.131676 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56\": container with ID starting with 0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56 not found: ID does not exist" containerID="0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.131738 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56"} err="failed to get container status \"0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56\": rpc error: code = NotFound desc = could not find container \"0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56\": container with ID starting with 0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56 not found: ID does not exist"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.131770 4644 scope.go:117] "RemoveContainer" containerID="75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.131676 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69eb14f6-88c6-43e9-89e0-554944201a8e-kube-api-access-m2pll" (OuterVolumeSpecName: "kube-api-access-m2pll") pod "69eb14f6-88c6-43e9-89e0-554944201a8e" (UID: "69eb14f6-88c6-43e9-89e0-554944201a8e"). InnerVolumeSpecName "kube-api-access-m2pll". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 13 07:03:33 crc kubenswrapper[4644]: E1213 07:03:33.132247 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906\": container with ID starting with 75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906 not found: ID does not exist" containerID="75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.132276 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906"} err="failed to get container status \"75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906\": rpc error: code = NotFound desc = could not find container \"75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906\": container with ID starting with 75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906 not found: ID does not exist"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.132294 4644 scope.go:117] "RemoveContainer" containerID="0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.132615 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56"} err="failed to get container status \"0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56\": rpc error: code = NotFound desc = could not find container \"0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56\": container with ID starting with 0e5ff506f4d876c81c3f69dd19dddda734fcf6a76c6a09652b3fc119d5f08a56 not found: ID does not exist"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.132635 4644 scope.go:117] "RemoveContainer" containerID="75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.133011 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906"} err="failed to get container status \"75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906\": rpc error: code = NotFound desc = could not find container \"75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906\": container with ID starting with 75b6c865d723e10c6f45dceae025bd517555e90131c8a18a30fc6aec0fab4906 not found: ID does not exist"
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.165672 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-config" (OuterVolumeSpecName: "config") pod "69eb14f6-88c6-43e9-89e0-554944201a8e" (UID: "69eb14f6-88c6-43e9-89e0-554944201a8e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.168046 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "69eb14f6-88c6-43e9-89e0-554944201a8e" (UID: "69eb14f6-88c6-43e9-89e0-554944201a8e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.171836 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "69eb14f6-88c6-43e9-89e0-554944201a8e" (UID: "69eb14f6-88c6-43e9-89e0-554944201a8e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.173905 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "69eb14f6-88c6-43e9-89e0-554944201a8e" (UID: "69eb14f6-88c6-43e9-89e0-554944201a8e"). InnerVolumeSpecName "dns-svc".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.227752 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/996b5118-5c68-484e-84e4-520e4e895a4d-logs\") pod \"nova-metadata-0\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " pod="openstack/nova-metadata-0" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.227881 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-config-data\") pod \"nova-metadata-0\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " pod="openstack/nova-metadata-0" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.227919 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " pod="openstack/nova-metadata-0" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.227956 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h75f2\" (UniqueName: \"kubernetes.io/projected/996b5118-5c68-484e-84e4-520e4e895a4d-kube-api-access-h75f2\") pod \"nova-metadata-0\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " pod="openstack/nova-metadata-0" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.228208 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " pod="openstack/nova-metadata-0" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.228417 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2pll\" (UniqueName: \"kubernetes.io/projected/69eb14f6-88c6-43e9-89e0-554944201a8e-kube-api-access-m2pll\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.228456 4644 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.228466 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.228476 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.228486 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/69eb14f6-88c6-43e9-89e0-554944201a8e-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.329509 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " pod="openstack/nova-metadata-0" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.329632 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/996b5118-5c68-484e-84e4-520e4e895a4d-logs\") pod \"nova-metadata-0\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " pod="openstack/nova-metadata-0" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.330043 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/996b5118-5c68-484e-84e4-520e4e895a4d-logs\") pod \"nova-metadata-0\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " pod="openstack/nova-metadata-0" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.330110 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-config-data\") pod \"nova-metadata-0\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " pod="openstack/nova-metadata-0" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.330133 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " pod="openstack/nova-metadata-0" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.330529 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h75f2\" (UniqueName: \"kubernetes.io/projected/996b5118-5c68-484e-84e4-520e4e895a4d-kube-api-access-h75f2\") pod \"nova-metadata-0\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " pod="openstack/nova-metadata-0" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.332649 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " pod="openstack/nova-metadata-0" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.333724 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " pod="openstack/nova-metadata-0" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.336126 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-config-data\") pod \"nova-metadata-0\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " pod="openstack/nova-metadata-0" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.345575 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h75f2\" (UniqueName: \"kubernetes.io/projected/996b5118-5c68-484e-84e4-520e4e895a4d-kube-api-access-h75f2\") pod \"nova-metadata-0\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " pod="openstack/nova-metadata-0" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.400073 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.868233 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 07:03:33 crc kubenswrapper[4644]: W1213 07:03:33.871952 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod996b5118_5c68_484e_84e4_520e4e895a4d.slice/crio-fe384c1357bb5062ccef64f25cf13079209f610e159ae1732c48c3dbaa18921b WatchSource:0}: Error finding container fe384c1357bb5062ccef64f25cf13079209f610e159ae1732c48c3dbaa18921b: Status 404 returned error can't find the container with id fe384c1357bb5062ccef64f25cf13079209f610e159ae1732c48c3dbaa18921b Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.995622 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68c677b759-hjpvx" Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.999657 4644 generic.go:334] "Generic (PLEG): container finished" podID="0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb" containerID="d1ea3af4751ad1645ef4b5c3bda75e3089f3ecffbc62a4768ce1a60e4d4960fb" exitCode=143 Dec 13 07:03:33 crc kubenswrapper[4644]: I1213 07:03:33.999736 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb","Type":"ContainerDied","Data":"d1ea3af4751ad1645ef4b5c3bda75e3089f3ecffbc62a4768ce1a60e4d4960fb"} Dec 13 07:03:34 crc kubenswrapper[4644]: I1213 07:03:34.004338 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"996b5118-5c68-484e-84e4-520e4e895a4d","Type":"ContainerStarted","Data":"fe384c1357bb5062ccef64f25cf13079209f610e159ae1732c48c3dbaa18921b"} Dec 13 07:03:34 crc kubenswrapper[4644]: I1213 07:03:34.004367 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a" containerName="nova-scheduler-scheduler" containerID="cri-o://626ccf5e46bff9c8eba4dca8206bdc0354230d67000e83801d986195f44ee258" gracePeriod=30 Dec 13 07:03:34 crc kubenswrapper[4644]: I1213 07:03:34.027242 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68c677b759-hjpvx"] Dec 13 07:03:34 crc kubenswrapper[4644]: I1213 07:03:34.034468 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-68c677b759-hjpvx"] Dec 13 07:03:34 crc kubenswrapper[4644]: I1213 07:03:34.398398 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10e5fcd5-832b-4890-80c2-2cf4d7fafe34" path="/var/lib/kubelet/pods/10e5fcd5-832b-4890-80c2-2cf4d7fafe34/volumes" Dec 13 07:03:34 crc kubenswrapper[4644]: I1213 07:03:34.399114 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69eb14f6-88c6-43e9-89e0-554944201a8e" path="/var/lib/kubelet/pods/69eb14f6-88c6-43e9-89e0-554944201a8e/volumes" Dec 13 07:03:35 crc kubenswrapper[4644]: I1213 07:03:35.013392 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"996b5118-5c68-484e-84e4-520e4e895a4d","Type":"ContainerStarted","Data":"188fa5495797aaea18d0cb461e779a677bef6faabdae10a28c2eab1a8788e1b9"} Dec 13 07:03:35 crc kubenswrapper[4644]: I1213 07:03:35.013432 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"996b5118-5c68-484e-84e4-520e4e895a4d","Type":"ContainerStarted","Data":"e184cd7401aa2a0a7a3725a7d59cf6bef4b45dfa1c20d220b327e6ad646415ea"} Dec 13 07:03:35 crc kubenswrapper[4644]: I1213 07:03:35.034236 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.034220329 podStartE2EDuration="2.034220329s" podCreationTimestamp="2025-12-13 07:03:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:03:35.027505001 +0000 UTC m=+1077.242455834" watchObservedRunningTime="2025-12-13 07:03:35.034220329 +0000 UTC m=+1077.249171162" Dec 13 07:03:35 crc kubenswrapper[4644]: I1213 07:03:35.386393 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 13 07:03:36 crc kubenswrapper[4644]: I1213 07:03:36.023535 4644 generic.go:334] "Generic (PLEG): container finished" podID="4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a" containerID="626ccf5e46bff9c8eba4dca8206bdc0354230d67000e83801d986195f44ee258" exitCode=0 Dec 13 07:03:36 crc kubenswrapper[4644]: I1213 07:03:36.024420 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a","Type":"ContainerDied","Data":"626ccf5e46bff9c8eba4dca8206bdc0354230d67000e83801d986195f44ee258"} Dec 13 07:03:36 crc kubenswrapper[4644]: I1213 07:03:36.100798 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 07:03:36 crc kubenswrapper[4644]: I1213 07:03:36.183721 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-config-data\") pod \"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a\" (UID: \"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a\") " Dec 13 07:03:36 crc kubenswrapper[4644]: I1213 07:03:36.183791 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-combined-ca-bundle\") pod \"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a\" (UID: \"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a\") " Dec 13 07:03:36 crc kubenswrapper[4644]: I1213 07:03:36.183968 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcj9n\" (UniqueName: \"kubernetes.io/projected/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-kube-api-access-tcj9n\") pod \"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a\" (UID: \"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a\") " Dec 13 07:03:36 crc kubenswrapper[4644]: I1213 07:03:36.189768 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-kube-api-access-tcj9n" (OuterVolumeSpecName: "kube-api-access-tcj9n") pod "4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a" (UID: "4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a"). InnerVolumeSpecName "kube-api-access-tcj9n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:03:36 crc kubenswrapper[4644]: I1213 07:03:36.206274 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-config-data" (OuterVolumeSpecName: "config-data") pod "4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a" (UID: "4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:03:36 crc kubenswrapper[4644]: I1213 07:03:36.210391 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a" (UID: "4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:03:36 crc kubenswrapper[4644]: I1213 07:03:36.286523 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcj9n\" (UniqueName: \"kubernetes.io/projected/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-kube-api-access-tcj9n\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:36 crc kubenswrapper[4644]: I1213 07:03:36.286559 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:36 crc kubenswrapper[4644]: I1213 07:03:36.286570 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.033732 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a","Type":"ContainerDied","Data":"0e51e6a65e627e121886da88e4291bdd410d834a97dc47644643546e0caa6c88"} Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.033785 4644 scope.go:117] "RemoveContainer" containerID="626ccf5e46bff9c8eba4dca8206bdc0354230d67000e83801d986195f44ee258" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.033781 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.051509 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.059348 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.068861 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 07:03:37 crc kubenswrapper[4644]: E1213 07:03:37.069227 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a" containerName="nova-scheduler-scheduler" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.069247 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a" containerName="nova-scheduler-scheduler" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.069421 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a" containerName="nova-scheduler-scheduler" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.070070 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.071670 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.077539 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.202518 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9a9cd4cc-b556-46d6-a80c-39b4312abc2a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.202905 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9fp6\" (UniqueName: \"kubernetes.io/projected/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-kube-api-access-k9fp6\") pod \"nova-scheduler-0\" (UID: \"9a9cd4cc-b556-46d6-a80c-39b4312abc2a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.203160 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-config-data\") pod \"nova-scheduler-0\" (UID: \"9a9cd4cc-b556-46d6-a80c-39b4312abc2a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.305139 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9a9cd4cc-b556-46d6-a80c-39b4312abc2a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.305184 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9fp6\" (UniqueName: \"kubernetes.io/projected/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-kube-api-access-k9fp6\") pod \"nova-scheduler-0\" (UID: \"9a9cd4cc-b556-46d6-a80c-39b4312abc2a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.305353 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-config-data\") pod \"nova-scheduler-0\" (UID: \"9a9cd4cc-b556-46d6-a80c-39b4312abc2a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.310161 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9a9cd4cc-b556-46d6-a80c-39b4312abc2a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.310597 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-config-data\") pod \"nova-scheduler-0\" (UID: \"9a9cd4cc-b556-46d6-a80c-39b4312abc2a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.318737 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9fp6\" (UniqueName: 
\"kubernetes.io/projected/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-kube-api-access-k9fp6\") pod \"nova-scheduler-0\" (UID: \"9a9cd4cc-b556-46d6-a80c-39b4312abc2a\") " pod="openstack/nova-scheduler-0" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.385385 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 07:03:37 crc kubenswrapper[4644]: I1213 07:03:37.757192 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 07:03:38 crc kubenswrapper[4644]: I1213 07:03:38.042597 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9a9cd4cc-b556-46d6-a80c-39b4312abc2a","Type":"ContainerStarted","Data":"f30c46c28d08ef3780f6dddb1a5fe092cea7bfa6826c087e457ac2ccc06ee8d2"} Dec 13 07:03:38 crc kubenswrapper[4644]: I1213 07:03:38.042638 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9a9cd4cc-b556-46d6-a80c-39b4312abc2a","Type":"ContainerStarted","Data":"a0edb6fa1afe5c8fa149537b7aacaac5adf06de94f61f4ee972cd8f21f6fe1f6"} Dec 13 07:03:38 crc kubenswrapper[4644]: I1213 07:03:38.069783 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.069767798 podStartE2EDuration="1.069767798s" podCreationTimestamp="2025-12-13 07:03:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:03:38.068433059 +0000 UTC m=+1080.283383892" watchObservedRunningTime="2025-12-13 07:03:38.069767798 +0000 UTC m=+1080.284718631" Dec 13 07:03:38 crc kubenswrapper[4644]: I1213 07:03:38.397356 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a" path="/var/lib/kubelet/pods/4ebf88d3-b1ff-43eb-8f15-e1f6b3a7700a/volumes" Dec 13 07:03:38 crc kubenswrapper[4644]: I1213 07:03:38.401269 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 13 07:03:38 crc kubenswrapper[4644]: I1213 07:03:38.401300 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 13 07:03:38 crc kubenswrapper[4644]: I1213 07:03:38.786783 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 13 07:03:38 crc kubenswrapper[4644]: I1213 07:03:38.935536 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-config-data\") pod \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\" (UID: \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\") " Dec 13 07:03:38 crc kubenswrapper[4644]: I1213 07:03:38.935637 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pztpd\" (UniqueName: \"kubernetes.io/projected/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-kube-api-access-pztpd\") pod \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\" (UID: \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\") " Dec 13 07:03:38 crc kubenswrapper[4644]: I1213 07:03:38.935702 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-logs\") pod \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\" (UID: \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\") " Dec 13 07:03:38 crc kubenswrapper[4644]: I1213 07:03:38.935831 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-combined-ca-bundle\") pod \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\" (UID: \"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb\") " Dec 13 07:03:38 crc kubenswrapper[4644]: I1213 07:03:38.936149 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-logs" (OuterVolumeSpecName: "logs") pod "0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb" (UID: "0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:03:38 crc kubenswrapper[4644]: I1213 07:03:38.936572 4644 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-logs\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:38 crc kubenswrapper[4644]: I1213 07:03:38.955878 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb" (UID: "0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:03:38 crc kubenswrapper[4644]: I1213 07:03:38.956073 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-kube-api-access-pztpd" (OuterVolumeSpecName: "kube-api-access-pztpd") pod "0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb" (UID: "0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb"). InnerVolumeSpecName "kube-api-access-pztpd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:03:38 crc kubenswrapper[4644]: I1213 07:03:38.966389 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-config-data" (OuterVolumeSpecName: "config-data") pod "0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb" (UID: "0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.037803 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.037840 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.037849 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pztpd\" (UniqueName: \"kubernetes.io/projected/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb-kube-api-access-pztpd\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.050748 4644 generic.go:334] "Generic (PLEG): container finished" podID="0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb" containerID="295d57168ab6f6432bcfb9b46df702524638954834303a2fb29caac70c7db024" exitCode=0 Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.050822 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb","Type":"ContainerDied","Data":"295d57168ab6f6432bcfb9b46df702524638954834303a2fb29caac70c7db024"} Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.050862 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb","Type":"ContainerDied","Data":"dbc72e6638e60065356da29152d807c5ad1e36938ebaa799b20e574e130efd9d"} Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.050884 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.050892 4644 scope.go:117] "RemoveContainer" containerID="295d57168ab6f6432bcfb9b46df702524638954834303a2fb29caac70c7db024" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.077520 4644 scope.go:117] "RemoveContainer" containerID="d1ea3af4751ad1645ef4b5c3bda75e3089f3ecffbc62a4768ce1a60e4d4960fb" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.078748 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.092207 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.105914 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.107270 4644 scope.go:117] "RemoveContainer" containerID="295d57168ab6f6432bcfb9b46df702524638954834303a2fb29caac70c7db024" Dec 13 07:03:39 crc kubenswrapper[4644]: E1213 07:03:39.107894 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"295d57168ab6f6432bcfb9b46df702524638954834303a2fb29caac70c7db024\": container with ID starting with 295d57168ab6f6432bcfb9b46df702524638954834303a2fb29caac70c7db024 not found: ID does not exist" containerID="295d57168ab6f6432bcfb9b46df702524638954834303a2fb29caac70c7db024" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.107941 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"295d57168ab6f6432bcfb9b46df702524638954834303a2fb29caac70c7db024"} err="failed to get container status \"295d57168ab6f6432bcfb9b46df702524638954834303a2fb29caac70c7db024\": rpc error: code = NotFound desc = could not find container \"295d57168ab6f6432bcfb9b46df702524638954834303a2fb29caac70c7db024\": container with ID starting with 295d57168ab6f6432bcfb9b46df702524638954834303a2fb29caac70c7db024 not found: ID does not exist" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.107968 4644 scope.go:117] "RemoveContainer" containerID="d1ea3af4751ad1645ef4b5c3bda75e3089f3ecffbc62a4768ce1a60e4d4960fb" Dec 13 07:03:39 crc kubenswrapper[4644]: E1213 07:03:39.108881 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1ea3af4751ad1645ef4b5c3bda75e3089f3ecffbc62a4768ce1a60e4d4960fb\": container with ID starting with d1ea3af4751ad1645ef4b5c3bda75e3089f3ecffbc62a4768ce1a60e4d4960fb not found: ID does not exist" containerID="d1ea3af4751ad1645ef4b5c3bda75e3089f3ecffbc62a4768ce1a60e4d4960fb" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.108941 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1ea3af4751ad1645ef4b5c3bda75e3089f3ecffbc62a4768ce1a60e4d4960fb"} err="failed to get container status \"d1ea3af4751ad1645ef4b5c3bda75e3089f3ecffbc62a4768ce1a60e4d4960fb\": rpc error: code = NotFound desc = could not find container \"d1ea3af4751ad1645ef4b5c3bda75e3089f3ecffbc62a4768ce1a60e4d4960fb\": container with ID starting with d1ea3af4751ad1645ef4b5c3bda75e3089f3ecffbc62a4768ce1a60e4d4960fb not found: ID does not exist" Dec 13 07:03:39 crc kubenswrapper[4644]: E1213 07:03:39.109186 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb" containerName="nova-api-log" Dec 13 07:03:39 crc 
kubenswrapper[4644]: I1213 07:03:39.109212 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb" containerName="nova-api-log" Dec 13 07:03:39 crc kubenswrapper[4644]: E1213 07:03:39.109223 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb" containerName="nova-api-api" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.109230 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb" containerName="nova-api-api" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.109425 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb" containerName="nova-api-log" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.109467 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb" containerName="nova-api-api" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.110469 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.112026 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.119806 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.241670 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f66fa31-b8e2-405d-8c55-75aee839481c-config-data\") pod \"nova-api-0\" (UID: \"4f66fa31-b8e2-405d-8c55-75aee839481c\") " pod="openstack/nova-api-0" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.241744 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vjth\" (UniqueName: \"kubernetes.io/projected/4f66fa31-b8e2-405d-8c55-75aee839481c-kube-api-access-7vjth\") pod \"nova-api-0\" (UID: \"4f66fa31-b8e2-405d-8c55-75aee839481c\") " pod="openstack/nova-api-0" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.241806 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f66fa31-b8e2-405d-8c55-75aee839481c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4f66fa31-b8e2-405d-8c55-75aee839481c\") " pod="openstack/nova-api-0" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.241904 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f66fa31-b8e2-405d-8c55-75aee839481c-logs\") pod \"nova-api-0\" (UID: \"4f66fa31-b8e2-405d-8c55-75aee839481c\") " pod="openstack/nova-api-0" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.343999 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f66fa31-b8e2-405d-8c55-75aee839481c-config-data\") pod \"nova-api-0\" (UID: \"4f66fa31-b8e2-405d-8c55-75aee839481c\") " pod="openstack/nova-api-0" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.344088 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vjth\" (UniqueName: \"kubernetes.io/projected/4f66fa31-b8e2-405d-8c55-75aee839481c-kube-api-access-7vjth\") pod \"nova-api-0\" (UID: 
\"4f66fa31-b8e2-405d-8c55-75aee839481c\") " pod="openstack/nova-api-0" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.344159 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f66fa31-b8e2-405d-8c55-75aee839481c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4f66fa31-b8e2-405d-8c55-75aee839481c\") " pod="openstack/nova-api-0" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.344239 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f66fa31-b8e2-405d-8c55-75aee839481c-logs\") pod \"nova-api-0\" (UID: \"4f66fa31-b8e2-405d-8c55-75aee839481c\") " pod="openstack/nova-api-0" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.344701 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f66fa31-b8e2-405d-8c55-75aee839481c-logs\") pod \"nova-api-0\" (UID: \"4f66fa31-b8e2-405d-8c55-75aee839481c\") " pod="openstack/nova-api-0" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.346973 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f66fa31-b8e2-405d-8c55-75aee839481c-config-data\") pod \"nova-api-0\" (UID: \"4f66fa31-b8e2-405d-8c55-75aee839481c\") " pod="openstack/nova-api-0" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.347665 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f66fa31-b8e2-405d-8c55-75aee839481c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4f66fa31-b8e2-405d-8c55-75aee839481c\") " pod="openstack/nova-api-0" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.364918 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vjth\" (UniqueName: \"kubernetes.io/projected/4f66fa31-b8e2-405d-8c55-75aee839481c-kube-api-access-7vjth\") pod \"nova-api-0\" (UID: \"4f66fa31-b8e2-405d-8c55-75aee839481c\") " pod="openstack/nova-api-0" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.432900 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.753676 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.753969 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:03:39 crc kubenswrapper[4644]: I1213 07:03:39.821245 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 13 07:03:40 crc kubenswrapper[4644]: I1213 07:03:40.059142 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4f66fa31-b8e2-405d-8c55-75aee839481c","Type":"ContainerStarted","Data":"bd6c689d21449964e13836ded5d77b6a24598fcdd7beb5e720e3bb25a0fa3380"} Dec 13 07:03:40 crc kubenswrapper[4644]: I1213 07:03:40.059382 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4f66fa31-b8e2-405d-8c55-75aee839481c","Type":"ContainerStarted","Data":"6da10d6334014ce1e9d1372b49a3317d2d064152c46d6b5b18ad9751d887d2f7"} Dec 13 07:03:40 crc kubenswrapper[4644]: I1213 07:03:40.059396 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4f66fa31-b8e2-405d-8c55-75aee839481c","Type":"ContainerStarted","Data":"1a5d2c0515a2a32533c97469f0df96da2809e83b6b444a76f82e84a04ef0ee47"} Dec 13 07:03:40 crc kubenswrapper[4644]: I1213 07:03:40.084150 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.084132855 podStartE2EDuration="1.084132855s" podCreationTimestamp="2025-12-13 07:03:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:03:40.077666685 +0000 UTC m=+1082.292617519" watchObservedRunningTime="2025-12-13 07:03:40.084132855 +0000 UTC m=+1082.299083688" Dec 13 07:03:40 crc kubenswrapper[4644]: I1213 07:03:40.398158 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb" path="/var/lib/kubelet/pods/0ec2e9bb-7bab-412c-8a1c-96b0e5a4d4bb/volumes" Dec 13 07:03:42 crc kubenswrapper[4644]: I1213 07:03:42.125177 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 13 07:03:42 crc kubenswrapper[4644]: I1213 07:03:42.385745 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 13 07:03:43 crc kubenswrapper[4644]: I1213 07:03:43.400906 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 13 07:03:43 crc kubenswrapper[4644]: I1213 07:03:43.401167 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 13 07:03:43 crc kubenswrapper[4644]: I1213 07:03:43.836221 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 13 07:03:43 crc kubenswrapper[4644]: I1213 07:03:43.836404 4644 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="628995d0-5034-4f64-8c48-50eee052e5db" containerName="kube-state-metrics" containerID="cri-o://2e205ab8eedc8db08ffe602a78832cb3ea0bdf11e2b521d1e5d23ec822c4bfcd" gracePeriod=30 Dec 13 07:03:44 crc kubenswrapper[4644]: I1213 07:03:44.102049 4644 generic.go:334] "Generic (PLEG): container finished" podID="628995d0-5034-4f64-8c48-50eee052e5db" containerID="2e205ab8eedc8db08ffe602a78832cb3ea0bdf11e2b521d1e5d23ec822c4bfcd" exitCode=2 Dec 13 07:03:44 crc kubenswrapper[4644]: I1213 07:03:44.102270 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"628995d0-5034-4f64-8c48-50eee052e5db","Type":"ContainerDied","Data":"2e205ab8eedc8db08ffe602a78832cb3ea0bdf11e2b521d1e5d23ec822c4bfcd"} Dec 13 07:03:44 crc kubenswrapper[4644]: I1213 07:03:44.239245 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 13 07:03:44 crc kubenswrapper[4644]: I1213 07:03:44.343553 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcx5f\" (UniqueName: \"kubernetes.io/projected/628995d0-5034-4f64-8c48-50eee052e5db-kube-api-access-hcx5f\") pod \"628995d0-5034-4f64-8c48-50eee052e5db\" (UID: \"628995d0-5034-4f64-8c48-50eee052e5db\") " Dec 13 07:03:44 crc kubenswrapper[4644]: I1213 07:03:44.349478 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/628995d0-5034-4f64-8c48-50eee052e5db-kube-api-access-hcx5f" (OuterVolumeSpecName: "kube-api-access-hcx5f") pod "628995d0-5034-4f64-8c48-50eee052e5db" (UID: "628995d0-5034-4f64-8c48-50eee052e5db"). InnerVolumeSpecName "kube-api-access-hcx5f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:03:44 crc kubenswrapper[4644]: I1213 07:03:44.412560 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="996b5118-5c68-484e-84e4-520e4e895a4d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.181:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 13 07:03:44 crc kubenswrapper[4644]: I1213 07:03:44.412570 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="996b5118-5c68-484e-84e4-520e4e895a4d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.181:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 13 07:03:44 crc kubenswrapper[4644]: I1213 07:03:44.446540 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcx5f\" (UniqueName: \"kubernetes.io/projected/628995d0-5034-4f64-8c48-50eee052e5db-kube-api-access-hcx5f\") on node \"crc\" DevicePath \"\"" Dec 13 07:03:44 crc kubenswrapper[4644]: I1213 07:03:44.784880 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:03:44 crc kubenswrapper[4644]: I1213 07:03:44.785724 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerName="ceilometer-notification-agent" containerID="cri-o://d5fda868f9ffecbb4c063085543de14ab89960fea6fa63e0042427cb9f72369f" gracePeriod=30 Dec 13 07:03:44 crc kubenswrapper[4644]: I1213 07:03:44.785714 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerName="sg-core" containerID="cri-o://927e0b2ce482a998fa79cb7c8fa50cf94d7d42fe52fd04f3ebd7ec1b0d08277a" gracePeriod=30 Dec 13 07:03:44 crc kubenswrapper[4644]: I1213 07:03:44.785735 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerName="proxy-httpd" containerID="cri-o://8eecf73f846b26c0de9e698d7a3018a15783ba62b371b0531cbee39d71c86136" gracePeriod=30 Dec 13 07:03:44 crc kubenswrapper[4644]: I1213 07:03:44.785676 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerName="ceilometer-central-agent" containerID="cri-o://faf7135bdb68f28b30495758634b09f474e7c43de085aa43507a3dac29201f69" gracePeriod=30 Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.114067 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"628995d0-5034-4f64-8c48-50eee052e5db","Type":"ContainerDied","Data":"5ba946bab0f6707806daeba084e2c5380d3ee3a484882f59c7a932d9b64f57fc"} Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.114112 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.114132 4644 scope.go:117] "RemoveContainer" containerID="2e205ab8eedc8db08ffe602a78832cb3ea0bdf11e2b521d1e5d23ec822c4bfcd"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.118943 4644 generic.go:334] "Generic (PLEG): container finished" podID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerID="8eecf73f846b26c0de9e698d7a3018a15783ba62b371b0531cbee39d71c86136" exitCode=0
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.118971 4644 generic.go:334] "Generic (PLEG): container finished" podID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerID="927e0b2ce482a998fa79cb7c8fa50cf94d7d42fe52fd04f3ebd7ec1b0d08277a" exitCode=2
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.118980 4644 generic.go:334] "Generic (PLEG): container finished" podID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerID="faf7135bdb68f28b30495758634b09f474e7c43de085aa43507a3dac29201f69" exitCode=0
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.118999 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49","Type":"ContainerDied","Data":"8eecf73f846b26c0de9e698d7a3018a15783ba62b371b0531cbee39d71c86136"}
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.119022 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49","Type":"ContainerDied","Data":"927e0b2ce482a998fa79cb7c8fa50cf94d7d42fe52fd04f3ebd7ec1b0d08277a"}
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.119031 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49","Type":"ContainerDied","Data":"faf7135bdb68f28b30495758634b09f474e7c43de085aa43507a3dac29201f69"}
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.142102 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.151333 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.164928 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 13 07:03:45 crc kubenswrapper[4644]: E1213 07:03:45.165303 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="628995d0-5034-4f64-8c48-50eee052e5db" containerName="kube-state-metrics"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.165324 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="628995d0-5034-4f64-8c48-50eee052e5db" containerName="kube-state-metrics"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.165580 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="628995d0-5034-4f64-8c48-50eee052e5db" containerName="kube-state-metrics"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.166189 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.169281 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.169413 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.185777 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.283121 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/316f5e12-4172-4f69-8aac-ec24edf8e012-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"316f5e12-4172-4f69-8aac-ec24edf8e012\") " pod="openstack/kube-state-metrics-0"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.283431 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/316f5e12-4172-4f69-8aac-ec24edf8e012-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"316f5e12-4172-4f69-8aac-ec24edf8e012\") " pod="openstack/kube-state-metrics-0"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.283550 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txztx\" (UniqueName: \"kubernetes.io/projected/316f5e12-4172-4f69-8aac-ec24edf8e012-kube-api-access-txztx\") pod \"kube-state-metrics-0\" (UID: \"316f5e12-4172-4f69-8aac-ec24edf8e012\") " pod="openstack/kube-state-metrics-0"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.283601 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/316f5e12-4172-4f69-8aac-ec24edf8e012-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"316f5e12-4172-4f69-8aac-ec24edf8e012\") " pod="openstack/kube-state-metrics-0"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.385204 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txztx\" (UniqueName: \"kubernetes.io/projected/316f5e12-4172-4f69-8aac-ec24edf8e012-kube-api-access-txztx\") pod \"kube-state-metrics-0\" (UID: \"316f5e12-4172-4f69-8aac-ec24edf8e012\") " pod="openstack/kube-state-metrics-0"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.385253 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/316f5e12-4172-4f69-8aac-ec24edf8e012-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"316f5e12-4172-4f69-8aac-ec24edf8e012\") " pod="openstack/kube-state-metrics-0"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.385298 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/316f5e12-4172-4f69-8aac-ec24edf8e012-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"316f5e12-4172-4f69-8aac-ec24edf8e012\") " pod="openstack/kube-state-metrics-0"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.385423 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/316f5e12-4172-4f69-8aac-ec24edf8e012-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"316f5e12-4172-4f69-8aac-ec24edf8e012\") " pod="openstack/kube-state-metrics-0"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.390366 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/316f5e12-4172-4f69-8aac-ec24edf8e012-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"316f5e12-4172-4f69-8aac-ec24edf8e012\") " pod="openstack/kube-state-metrics-0"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.398986 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/316f5e12-4172-4f69-8aac-ec24edf8e012-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"316f5e12-4172-4f69-8aac-ec24edf8e012\") " pod="openstack/kube-state-metrics-0"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.402841 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/316f5e12-4172-4f69-8aac-ec24edf8e012-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"316f5e12-4172-4f69-8aac-ec24edf8e012\") " pod="openstack/kube-state-metrics-0"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.406977 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txztx\" (UniqueName: \"kubernetes.io/projected/316f5e12-4172-4f69-8aac-ec24edf8e012-kube-api-access-txztx\") pod \"kube-state-metrics-0\" (UID: \"316f5e12-4172-4f69-8aac-ec24edf8e012\") " pod="openstack/kube-state-metrics-0"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.487914 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 13 07:03:45 crc kubenswrapper[4644]: I1213 07:03:45.917834 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 13 07:03:46 crc kubenswrapper[4644]: I1213 07:03:46.127201 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"316f5e12-4172-4f69-8aac-ec24edf8e012","Type":"ContainerStarted","Data":"ae56fe7c74f418983b56302f98427d987527168b9ce2598cc55bed97d3a920be"}
Dec 13 07:03:46 crc kubenswrapper[4644]: I1213 07:03:46.397427 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="628995d0-5034-4f64-8c48-50eee052e5db" path="/var/lib/kubelet/pods/628995d0-5034-4f64-8c48-50eee052e5db/volumes"
Dec 13 07:03:47 crc kubenswrapper[4644]: I1213 07:03:47.158406 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"316f5e12-4172-4f69-8aac-ec24edf8e012","Type":"ContainerStarted","Data":"b1c964748f7c13f520c14787ebdd52bfad6720d301e3bc2a8ff205fa32440836"}
Dec 13 07:03:47 crc kubenswrapper[4644]: I1213 07:03:47.158541 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Dec 13 07:03:47 crc kubenswrapper[4644]: I1213 07:03:47.176168 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.8982937560000002 podStartE2EDuration="2.176151522s" podCreationTimestamp="2025-12-13 07:03:45 +0000 UTC" firstStartedPulling="2025-12-13 07:03:45.921292263 +0000 UTC m=+1088.136243095" lastFinishedPulling="2025-12-13 07:03:46.199150028 +0000 UTC m=+1088.414100861" observedRunningTime="2025-12-13 07:03:47.169356203 +0000 UTC m=+1089.384307036" watchObservedRunningTime="2025-12-13 07:03:47.176151522 +0000 UTC m=+1089.391102355"
Dec 13 07:03:47 crc kubenswrapper[4644]: I1213 07:03:47.386124 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 13 07:03:47 crc kubenswrapper[4644]: I1213 07:03:47.409395 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.185108 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.701005 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.848661 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-scripts\") pod \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") "
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.848728 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-log-httpd\") pod \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") "
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.848802 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4hlg\" (UniqueName: \"kubernetes.io/projected/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-kube-api-access-g4hlg\") pod \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") "
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.848875 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-config-data\") pod \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") "
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.848916 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-sg-core-conf-yaml\") pod \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") "
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.848944 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-run-httpd\") pod \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") "
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.849013 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-combined-ca-bundle\") pod \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\" (UID: \"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49\") "
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.850619 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" (UID: "f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.850643 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" (UID: "f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.854683 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-scripts" (OuterVolumeSpecName: "scripts") pod "f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" (UID: "f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.854806 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-kube-api-access-g4hlg" (OuterVolumeSpecName: "kube-api-access-g4hlg") pod "f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" (UID: "f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49"). InnerVolumeSpecName "kube-api-access-g4hlg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.874006 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" (UID: "f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.906391 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" (UID: "f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.943515 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-config-data" (OuterVolumeSpecName: "config-data") pod "f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" (UID: "f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.951698 4644 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.951735 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.951745 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-scripts\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.951754 4644 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.951762 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4hlg\" (UniqueName: \"kubernetes.io/projected/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-kube-api-access-g4hlg\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.951771 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-config-data\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:48 crc kubenswrapper[4644]: I1213 07:03:48.951778 4644 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.174891 4644 generic.go:334] "Generic (PLEG): container finished" podID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerID="d5fda868f9ffecbb4c063085543de14ab89960fea6fa63e0042427cb9f72369f" exitCode=0
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.174947 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.174986 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49","Type":"ContainerDied","Data":"d5fda868f9ffecbb4c063085543de14ab89960fea6fa63e0042427cb9f72369f"}
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.175039 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49","Type":"ContainerDied","Data":"c3d44277d7246df484249428330b46d150179bd2ac142cf5ad147b63eaf9d2e4"}
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.175059 4644 scope.go:117] "RemoveContainer" containerID="8eecf73f846b26c0de9e698d7a3018a15783ba62b371b0531cbee39d71c86136"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.192713 4644 scope.go:117] "RemoveContainer" containerID="927e0b2ce482a998fa79cb7c8fa50cf94d7d42fe52fd04f3ebd7ec1b0d08277a"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.201586 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.209066 4644 scope.go:117] "RemoveContainer" containerID="d5fda868f9ffecbb4c063085543de14ab89960fea6fa63e0042427cb9f72369f"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.209164 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.235810 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 13 07:03:49 crc kubenswrapper[4644]: E1213 07:03:49.236230 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerName="sg-core"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.236248 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerName="sg-core"
Dec 13 07:03:49 crc kubenswrapper[4644]: E1213 07:03:49.236262 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerName="ceilometer-central-agent"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.236268 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerName="ceilometer-central-agent"
Dec 13 07:03:49 crc kubenswrapper[4644]: E1213 07:03:49.236281 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerName="proxy-httpd"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.236287 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerName="proxy-httpd"
Dec 13 07:03:49 crc kubenswrapper[4644]: E1213 07:03:49.236304 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerName="ceilometer-notification-agent"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.236310 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerName="ceilometer-notification-agent"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.236490 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerName="ceilometer-notification-agent"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.236507 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerName="sg-core"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.236516 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerName="ceilometer-central-agent"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.236540 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" containerName="proxy-httpd"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.238010 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.240380 4644 scope.go:117] "RemoveContainer" containerID="faf7135bdb68f28b30495758634b09f474e7c43de085aa43507a3dac29201f69"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.240708 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.240765 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.240943 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.246352 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.261646 4644 scope.go:117] "RemoveContainer" containerID="8eecf73f846b26c0de9e698d7a3018a15783ba62b371b0531cbee39d71c86136"
Dec 13 07:03:49 crc kubenswrapper[4644]: E1213 07:03:49.262362 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8eecf73f846b26c0de9e698d7a3018a15783ba62b371b0531cbee39d71c86136\": container with ID starting with 8eecf73f846b26c0de9e698d7a3018a15783ba62b371b0531cbee39d71c86136 not found: ID does not exist" containerID="8eecf73f846b26c0de9e698d7a3018a15783ba62b371b0531cbee39d71c86136"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.262397 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8eecf73f846b26c0de9e698d7a3018a15783ba62b371b0531cbee39d71c86136"} err="failed to get container status \"8eecf73f846b26c0de9e698d7a3018a15783ba62b371b0531cbee39d71c86136\": rpc error: code = NotFound desc = could not find container \"8eecf73f846b26c0de9e698d7a3018a15783ba62b371b0531cbee39d71c86136\": container with ID starting with 8eecf73f846b26c0de9e698d7a3018a15783ba62b371b0531cbee39d71c86136 not found: ID does not exist"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.262423 4644 scope.go:117] "RemoveContainer" containerID="927e0b2ce482a998fa79cb7c8fa50cf94d7d42fe52fd04f3ebd7ec1b0d08277a"
Dec 13 07:03:49 crc kubenswrapper[4644]: E1213 07:03:49.262785 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"927e0b2ce482a998fa79cb7c8fa50cf94d7d42fe52fd04f3ebd7ec1b0d08277a\": container with ID starting with 927e0b2ce482a998fa79cb7c8fa50cf94d7d42fe52fd04f3ebd7ec1b0d08277a not found: ID does not exist" containerID="927e0b2ce482a998fa79cb7c8fa50cf94d7d42fe52fd04f3ebd7ec1b0d08277a"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.262828 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"927e0b2ce482a998fa79cb7c8fa50cf94d7d42fe52fd04f3ebd7ec1b0d08277a"} err="failed to get container status \"927e0b2ce482a998fa79cb7c8fa50cf94d7d42fe52fd04f3ebd7ec1b0d08277a\": rpc error: code = NotFound desc = could not find container \"927e0b2ce482a998fa79cb7c8fa50cf94d7d42fe52fd04f3ebd7ec1b0d08277a\": container with ID starting with 927e0b2ce482a998fa79cb7c8fa50cf94d7d42fe52fd04f3ebd7ec1b0d08277a not found: ID does not exist"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.262855 4644 scope.go:117] "RemoveContainer" containerID="d5fda868f9ffecbb4c063085543de14ab89960fea6fa63e0042427cb9f72369f"
Dec 13 07:03:49 crc kubenswrapper[4644]: E1213 07:03:49.263210 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5fda868f9ffecbb4c063085543de14ab89960fea6fa63e0042427cb9f72369f\": container with ID starting with d5fda868f9ffecbb4c063085543de14ab89960fea6fa63e0042427cb9f72369f not found: ID does not exist" containerID="d5fda868f9ffecbb4c063085543de14ab89960fea6fa63e0042427cb9f72369f"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.263236 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5fda868f9ffecbb4c063085543de14ab89960fea6fa63e0042427cb9f72369f"} err="failed to get container status \"d5fda868f9ffecbb4c063085543de14ab89960fea6fa63e0042427cb9f72369f\": rpc error: code = NotFound desc = could not find container \"d5fda868f9ffecbb4c063085543de14ab89960fea6fa63e0042427cb9f72369f\": container with ID starting with d5fda868f9ffecbb4c063085543de14ab89960fea6fa63e0042427cb9f72369f not found: ID does not exist"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.263253 4644 scope.go:117] "RemoveContainer" containerID="faf7135bdb68f28b30495758634b09f474e7c43de085aa43507a3dac29201f69"
Dec 13 07:03:49 crc kubenswrapper[4644]: E1213 07:03:49.263511 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"faf7135bdb68f28b30495758634b09f474e7c43de085aa43507a3dac29201f69\": container with ID starting with faf7135bdb68f28b30495758634b09f474e7c43de085aa43507a3dac29201f69 not found: ID does not exist" containerID="faf7135bdb68f28b30495758634b09f474e7c43de085aa43507a3dac29201f69"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.263533 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"faf7135bdb68f28b30495758634b09f474e7c43de085aa43507a3dac29201f69"} err="failed to get container status \"faf7135bdb68f28b30495758634b09f474e7c43de085aa43507a3dac29201f69\": rpc error: code = NotFound desc = could not find container \"faf7135bdb68f28b30495758634b09f474e7c43de085aa43507a3dac29201f69\": container with ID starting with faf7135bdb68f28b30495758634b09f474e7c43de085aa43507a3dac29201f69 not found: ID does not exist"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.360135 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-config-data\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.360197 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.360277 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.360367 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-scripts\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.360422 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.360528 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7842864-e743-436c-a829-9c3be434cb89-log-httpd\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.360575 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flqp6\" (UniqueName: \"kubernetes.io/projected/f7842864-e743-436c-a829-9c3be434cb89-kube-api-access-flqp6\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.360756 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7842864-e743-436c-a829-9c3be434cb89-run-httpd\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.433670 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.433721 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.462391 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flqp6\" (UniqueName: \"kubernetes.io/projected/f7842864-e743-436c-a829-9c3be434cb89-kube-api-access-flqp6\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.462530 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7842864-e743-436c-a829-9c3be434cb89-run-httpd\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.462591 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-config-data\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.462614 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.462652 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.462673 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-scripts\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.462694 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.462733 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7842864-e743-436c-a829-9c3be434cb89-log-httpd\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.463148 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7842864-e743-436c-a829-9c3be434cb89-run-httpd\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.463173 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7842864-e743-436c-a829-9c3be434cb89-log-httpd\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.468156 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.468340 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.468525 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-scripts\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.469030 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.471069 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-config-data\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.475995 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flqp6\" (UniqueName: \"kubernetes.io/projected/f7842864-e743-436c-a829-9c3be434cb89-kube-api-access-flqp6\") pod \"ceilometer-0\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.554150 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 13 07:03:49 crc kubenswrapper[4644]: I1213 07:03:49.962471 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 13 07:03:50 crc kubenswrapper[4644]: I1213 07:03:50.184914 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7842864-e743-436c-a829-9c3be434cb89","Type":"ContainerStarted","Data":"e3ebc03f35d679daf4a71301c3910c0c6675f0811e2c1aa595c9db63d833aef3"}
Dec 13 07:03:50 crc kubenswrapper[4644]: I1213 07:03:50.396832 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49" path="/var/lib/kubelet/pods/f8f4fd88-62a2-4ff6-a724-5cfc2e15fb49/volumes"
Dec 13 07:03:50 crc kubenswrapper[4644]: I1213 07:03:50.515595 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4f66fa31-b8e2-405d-8c55-75aee839481c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.183:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 13 07:03:50 crc kubenswrapper[4644]: I1213 07:03:50.515631 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4f66fa31-b8e2-405d-8c55-75aee839481c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.183:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 13 07:03:51 crc kubenswrapper[4644]: I1213 07:03:51.193996 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7842864-e743-436c-a829-9c3be434cb89","Type":"ContainerStarted","Data":"e2d09db72a9ac6ea1dc14a4748a04dde81d5858632c07376178fc18cee2565bc"}
Dec 13 07:03:52 crc kubenswrapper[4644]: I1213 07:03:52.204215 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7842864-e743-436c-a829-9c3be434cb89","Type":"ContainerStarted","Data":"bad98fad7253b63abb5a9336a7c07a4f4a15f25ab8d981fb9bec1cc5203dc650"}
Dec 13 07:03:53 crc kubenswrapper[4644]: I1213 07:03:53.213676 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7842864-e743-436c-a829-9c3be434cb89","Type":"ContainerStarted","Data":"e7c4eb53540d35ad4847284d07fe75530647bac800eb776bb2866742d2f13b64"}
Dec 13 07:03:53 crc kubenswrapper[4644]: I1213 07:03:53.405616 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 13 07:03:53 crc kubenswrapper[4644]: I1213 07:03:53.408087 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 13 07:03:53 crc kubenswrapper[4644]: I1213 07:03:53.424320 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 13 07:03:54 crc kubenswrapper[4644]: I1213 07:03:54.222999 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7842864-e743-436c-a829-9c3be434cb89","Type":"ContainerStarted","Data":"96568e3bb0ac20eff94edcdda1181f06bb5b82722b52ca6642af2d5f51e7751d"}
Dec 13 07:03:54 crc kubenswrapper[4644]: I1213 07:03:54.233970 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 13 07:03:54 crc kubenswrapper[4644]: I1213 07:03:54.241669 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.5703293249999999 podStartE2EDuration="5.241654248s" podCreationTimestamp="2025-12-13 07:03:49 +0000 UTC" firstStartedPulling="2025-12-13 07:03:49.969181813 +0000 UTC m=+1092.184132646" lastFinishedPulling="2025-12-13 07:03:53.640506736 +0000 UTC m=+1095.855457569" observedRunningTime="2025-12-13 07:03:54.238573396 +0000 UTC m=+1096.453524229" watchObservedRunningTime="2025-12-13 07:03:54.241654248 +0000 UTC m=+1096.456605082"
Dec 13 07:03:55 crc kubenswrapper[4644]: I1213 07:03:55.232401 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 13 07:03:55 crc kubenswrapper[4644]: I1213 07:03:55.497738 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Dec 13 07:03:57 crc kubenswrapper[4644]: I1213 07:03:57.255943 4644 generic.go:334] "Generic (PLEG): container finished" podID="85c349ef-fdeb-44d2-8cf2-a55533af4d5b" containerID="7dc0b92a87eea140d607390f4036ec01210b56a657743b3c6c2f4814011cf1f0" exitCode=137
Dec 13 07:03:57 crc kubenswrapper[4644]: I1213 07:03:57.256036 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"85c349ef-fdeb-44d2-8cf2-a55533af4d5b","Type":"ContainerDied","Data":"7dc0b92a87eea140d607390f4036ec01210b56a657743b3c6c2f4814011cf1f0"}
Dec 13 07:03:57 crc kubenswrapper[4644]: E1213 07:03:57.286251 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85c349ef_fdeb_44d2_8cf2_a55533af4d5b.slice/crio-conmon-7dc0b92a87eea140d607390f4036ec01210b56a657743b3c6c2f4814011cf1f0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85c349ef_fdeb_44d2_8cf2_a55533af4d5b.slice/crio-7dc0b92a87eea140d607390f4036ec01210b56a657743b3c6c2f4814011cf1f0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8f4fd88_62a2_4ff6_a724_5cfc2e15fb49.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8f4fd88_62a2_4ff6_a724_5cfc2e15fb49.slice/crio-conmon-d5fda868f9ffecbb4c063085543de14ab89960fea6fa63e0042427cb9f72369f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8f4fd88_62a2_4ff6_a724_5cfc2e15fb49.slice/crio-c3d44277d7246df484249428330b46d150179bd2ac142cf5ad147b63eaf9d2e4\": RecentStats: unable to find data in memory cache]"
Dec 13 07:03:57 crc kubenswrapper[4644]: I1213 07:03:57.385726 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:57 crc kubenswrapper[4644]: I1213 07:03:57.524497 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-combined-ca-bundle\") pod \"85c349ef-fdeb-44d2-8cf2-a55533af4d5b\" (UID: \"85c349ef-fdeb-44d2-8cf2-a55533af4d5b\") "
Dec 13 07:03:57 crc kubenswrapper[4644]: I1213 07:03:57.524688 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-config-data\") pod \"85c349ef-fdeb-44d2-8cf2-a55533af4d5b\" (UID: \"85c349ef-fdeb-44d2-8cf2-a55533af4d5b\") "
Dec 13 07:03:57 crc kubenswrapper[4644]: I1213 07:03:57.524720 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwd4d\" (UniqueName: \"kubernetes.io/projected/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-kube-api-access-bwd4d\") pod \"85c349ef-fdeb-44d2-8cf2-a55533af4d5b\" (UID: \"85c349ef-fdeb-44d2-8cf2-a55533af4d5b\") "
Dec 13 07:03:57 crc kubenswrapper[4644]: I1213 07:03:57.529010 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-kube-api-access-bwd4d" (OuterVolumeSpecName: "kube-api-access-bwd4d") pod "85c349ef-fdeb-44d2-8cf2-a55533af4d5b" (UID: "85c349ef-fdeb-44d2-8cf2-a55533af4d5b"). InnerVolumeSpecName "kube-api-access-bwd4d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 13 07:03:57 crc kubenswrapper[4644]: I1213 07:03:57.545308 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "85c349ef-fdeb-44d2-8cf2-a55533af4d5b" (UID: "85c349ef-fdeb-44d2-8cf2-a55533af4d5b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:03:57 crc kubenswrapper[4644]: I1213 07:03:57.545701 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-config-data" (OuterVolumeSpecName: "config-data") pod "85c349ef-fdeb-44d2-8cf2-a55533af4d5b" (UID: "85c349ef-fdeb-44d2-8cf2-a55533af4d5b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:03:57 crc kubenswrapper[4644]: I1213 07:03:57.629205 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:57 crc kubenswrapper[4644]: I1213 07:03:57.629236 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-config-data\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:57 crc kubenswrapper[4644]: I1213 07:03:57.629246 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwd4d\" (UniqueName: \"kubernetes.io/projected/85c349ef-fdeb-44d2-8cf2-a55533af4d5b-kube-api-access-bwd4d\") on node \"crc\" DevicePath \"\""
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.265892 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"85c349ef-fdeb-44d2-8cf2-a55533af4d5b","Type":"ContainerDied","Data":"f4c6aa4e396a9a1dd98091637838546b8b3e6bf83cf7a2cf0073f03a12074c11"}
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.265951 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.265952 4644 scope.go:117] "RemoveContainer" containerID="7dc0b92a87eea140d607390f4036ec01210b56a657743b3c6c2f4814011cf1f0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.292147 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.299068 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.306573 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 13 07:03:58 crc kubenswrapper[4644]: E1213 07:03:58.306934 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85c349ef-fdeb-44d2-8cf2-a55533af4d5b" containerName="nova-cell1-novncproxy-novncproxy"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.306953 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="85c349ef-fdeb-44d2-8cf2-a55533af4d5b" containerName="nova-cell1-novncproxy-novncproxy"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.307164 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="85c349ef-fdeb-44d2-8cf2-a55533af4d5b" containerName="nova-cell1-novncproxy-novncproxy"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.307724 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.311258 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.311406 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.311553 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.320250 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.396816 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85c349ef-fdeb-44d2-8cf2-a55533af4d5b" path="/var/lib/kubelet/pods/85c349ef-fdeb-44d2-8cf2-a55533af4d5b/volumes"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.451145 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12464868-cccc-417b-b431-4cd8e1317137-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"12464868-cccc-417b-b431-4cd8e1317137\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.451202 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/12464868-cccc-417b-b431-4cd8e1317137-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"12464868-cccc-417b-b431-4cd8e1317137\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.451226 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7w9k\" (UniqueName: \"kubernetes.io/projected/12464868-cccc-417b-b431-4cd8e1317137-kube-api-access-s7w9k\") pod \"nova-cell1-novncproxy-0\" (UID: \"12464868-cccc-417b-b431-4cd8e1317137\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.451263 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/12464868-cccc-417b-b431-4cd8e1317137-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"12464868-cccc-417b-b431-4cd8e1317137\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.451479 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12464868-cccc-417b-b431-4cd8e1317137-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"12464868-cccc-417b-b431-4cd8e1317137\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.553074 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12464868-cccc-417b-b431-4cd8e1317137-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"12464868-cccc-417b-b431-4cd8e1317137\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.553134 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/12464868-cccc-417b-b431-4cd8e1317137-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"12464868-cccc-417b-b431-4cd8e1317137\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.553156 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7w9k\" (UniqueName: \"kubernetes.io/projected/12464868-cccc-417b-b431-4cd8e1317137-kube-api-access-s7w9k\") pod \"nova-cell1-novncproxy-0\" (UID: \"12464868-cccc-417b-b431-4cd8e1317137\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.553211 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/12464868-cccc-417b-b431-4cd8e1317137-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"12464868-cccc-417b-b431-4cd8e1317137\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.553895 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12464868-cccc-417b-b431-4cd8e1317137-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"12464868-cccc-417b-b431-4cd8e1317137\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.557762 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12464868-cccc-417b-b431-4cd8e1317137-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"12464868-cccc-417b-b431-4cd8e1317137\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.557763 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/12464868-cccc-417b-b431-4cd8e1317137-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"12464868-cccc-417b-b431-4cd8e1317137\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.558184 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12464868-cccc-417b-b431-4cd8e1317137-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"12464868-cccc-417b-b431-4cd8e1317137\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.563046 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/12464868-cccc-417b-b431-4cd8e1317137-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"12464868-cccc-417b-b431-4cd8e1317137\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.569215 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7w9k\" (UniqueName: \"kubernetes.io/projected/12464868-cccc-417b-b431-4cd8e1317137-kube-api-access-s7w9k\") pod \"nova-cell1-novncproxy-0\" (UID: \"12464868-cccc-417b-b431-4cd8e1317137\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:58 crc kubenswrapper[4644]: I1213 07:03:58.622643 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:03:59 crc kubenswrapper[4644]: I1213 07:03:59.018798 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 13 07:03:59 crc kubenswrapper[4644]: W1213 07:03:59.020707 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod12464868_cccc_417b_b431_4cd8e1317137.slice/crio-37e716412d9080a78a011042b7a41fe7d59d5b27ebb198f3256ccddf3599fee6 WatchSource:0}: Error finding container 37e716412d9080a78a011042b7a41fe7d59d5b27ebb198f3256ccddf3599fee6: Status 404 returned error can't find the container with id 37e716412d9080a78a011042b7a41fe7d59d5b27ebb198f3256ccddf3599fee6
Dec 13 07:03:59 crc kubenswrapper[4644]: I1213 07:03:59.274496 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"12464868-cccc-417b-b431-4cd8e1317137","Type":"ContainerStarted","Data":"a9c2b5603a7511f2282c2027b0fa9486a8ffffe10f4005baad0377cf413a52ff"}
Dec 13 07:03:59 crc kubenswrapper[4644]: I1213 07:03:59.274817 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"12464868-cccc-417b-b431-4cd8e1317137","Type":"ContainerStarted","Data":"37e716412d9080a78a011042b7a41fe7d59d5b27ebb198f3256ccddf3599fee6"}
Dec 13 07:03:59 crc kubenswrapper[4644]: I1213 07:03:59.295276 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.295255542 podStartE2EDuration="1.295255542s" podCreationTimestamp="2025-12-13 07:03:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:03:59.289382918 +0000 UTC m=+1101.504333751" watchObservedRunningTime="2025-12-13 07:03:59.295255542 +0000 UTC m=+1101.510206374"
Dec 13 07:03:59 crc kubenswrapper[4644]: I1213 07:03:59.439504 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 13 07:03:59 crc kubenswrapper[4644]: I1213 07:03:59.440051 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 13 07:03:59 crc kubenswrapper[4644]: I1213 07:03:59.442280 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 13 07:03:59 crc kubenswrapper[4644]: I1213 07:03:59.444665 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.284335 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.287785 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.415973 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6f69c5c76f-qtbp6"]
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.418520 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.442155 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f69c5c76f-qtbp6"]
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.594996 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gc8rx\" (UniqueName: \"kubernetes.io/projected/40dff749-3ac6-4c02-b892-e0d70c4e267f-kube-api-access-gc8rx\") pod \"dnsmasq-dns-6f69c5c76f-qtbp6\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.595052 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-ovsdbserver-sb\") pod \"dnsmasq-dns-6f69c5c76f-qtbp6\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.595091 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-ovsdbserver-nb\") pod \"dnsmasq-dns-6f69c5c76f-qtbp6\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.595329 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-config\") pod \"dnsmasq-dns-6f69c5c76f-qtbp6\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.595485 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-dns-svc\") pod \"dnsmasq-dns-6f69c5c76f-qtbp6\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.697556 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gc8rx\" (UniqueName: \"kubernetes.io/projected/40dff749-3ac6-4c02-b892-e0d70c4e267f-kube-api-access-gc8rx\") pod \"dnsmasq-dns-6f69c5c76f-qtbp6\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.697612 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-ovsdbserver-sb\") pod \"dnsmasq-dns-6f69c5c76f-qtbp6\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.697660 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-ovsdbserver-nb\") pod \"dnsmasq-dns-6f69c5c76f-qtbp6\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.697718 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-config\") pod \"dnsmasq-dns-6f69c5c76f-qtbp6\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.697767 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-dns-svc\") pod \"dnsmasq-dns-6f69c5c76f-qtbp6\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.699053 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-dns-svc\") pod \"dnsmasq-dns-6f69c5c76f-qtbp6\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.699086 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-ovsdbserver-sb\") pod \"dnsmasq-dns-6f69c5c76f-qtbp6\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.699074 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-ovsdbserver-nb\") pod \"dnsmasq-dns-6f69c5c76f-qtbp6\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.699107 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-config\") pod \"dnsmasq-dns-6f69c5c76f-qtbp6\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.715970 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gc8rx\" (UniqueName: \"kubernetes.io/projected/40dff749-3ac6-4c02-b892-e0d70c4e267f-kube-api-access-gc8rx\") pod \"dnsmasq-dns-6f69c5c76f-qtbp6\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:00 crc kubenswrapper[4644]: I1213 07:04:00.767794 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:01 crc kubenswrapper[4644]: I1213 07:04:01.119683 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f69c5c76f-qtbp6"]
Dec 13 07:04:01 crc kubenswrapper[4644]: W1213 07:04:01.123067 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod40dff749_3ac6_4c02_b892_e0d70c4e267f.slice/crio-be691b2ff7b7517ffbf0dcd37deebbeea3f91994577126e10e8b1521acff34e0 WatchSource:0}: Error finding container be691b2ff7b7517ffbf0dcd37deebbeea3f91994577126e10e8b1521acff34e0: Status 404 returned error can't find the container with id be691b2ff7b7517ffbf0dcd37deebbeea3f91994577126e10e8b1521acff34e0
Dec 13 07:04:01 crc kubenswrapper[4644]: I1213 07:04:01.290349 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6" event={"ID":"40dff749-3ac6-4c02-b892-e0d70c4e267f","Type":"ContainerStarted","Data":"be691b2ff7b7517ffbf0dcd37deebbeea3f91994577126e10e8b1521acff34e0"}
Dec 13 07:04:02 crc kubenswrapper[4644]: I1213 07:04:02.300188 4644 generic.go:334] "Generic (PLEG): container finished" podID="40dff749-3ac6-4c02-b892-e0d70c4e267f" containerID="e8a10d061eda4493e42c0d950f165c5bcacfe182a9ab17865b739ae4fd491a34" exitCode=0
Dec 13 07:04:02 crc kubenswrapper[4644]: I1213 07:04:02.300316 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6" event={"ID":"40dff749-3ac6-4c02-b892-e0d70c4e267f","Type":"ContainerDied","Data":"e8a10d061eda4493e42c0d950f165c5bcacfe182a9ab17865b739ae4fd491a34"}
Dec 13 07:04:02 crc kubenswrapper[4644]: I1213 07:04:02.432450 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 13 07:04:02 crc kubenswrapper[4644]: I1213 07:04:02.432772 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f7842864-e743-436c-a829-9c3be434cb89" containerName="ceilometer-central-agent" containerID="cri-o://e2d09db72a9ac6ea1dc14a4748a04dde81d5858632c07376178fc18cee2565bc" gracePeriod=30
Dec 13 07:04:02 crc kubenswrapper[4644]: I1213 07:04:02.432794 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f7842864-e743-436c-a829-9c3be434cb89" containerName="proxy-httpd" containerID="cri-o://96568e3bb0ac20eff94edcdda1181f06bb5b82722b52ca6642af2d5f51e7751d" gracePeriod=30
Dec 13 07:04:02 crc kubenswrapper[4644]: I1213 07:04:02.432814 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f7842864-e743-436c-a829-9c3be434cb89" containerName="ceilometer-notification-agent" containerID="cri-o://bad98fad7253b63abb5a9336a7c07a4f4a15f25ab8d981fb9bec1cc5203dc650" gracePeriod=30
Dec 13 07:04:02 crc kubenswrapper[4644]: I1213 07:04:02.432837 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f7842864-e743-436c-a829-9c3be434cb89" containerName="sg-core" containerID="cri-o://e7c4eb53540d35ad4847284d07fe75530647bac800eb776bb2866742d2f13b64" gracePeriod=30
Dec 13 07:04:02 crc kubenswrapper[4644]: I1213 07:04:02.992289 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 13 07:04:03 crc kubenswrapper[4644]: I1213 07:04:03.310474 4644 generic.go:334] "Generic (PLEG): container finished" podID="f7842864-e743-436c-a829-9c3be434cb89" containerID="96568e3bb0ac20eff94edcdda1181f06bb5b82722b52ca6642af2d5f51e7751d" exitCode=0
Dec 13 07:04:03 crc kubenswrapper[4644]: I1213 07:04:03.310509 4644 generic.go:334] "Generic (PLEG): container finished" podID="f7842864-e743-436c-a829-9c3be434cb89" containerID="e7c4eb53540d35ad4847284d07fe75530647bac800eb776bb2866742d2f13b64" exitCode=2
Dec 13 07:04:03 crc kubenswrapper[4644]: I1213 07:04:03.310517 4644 generic.go:334] "Generic (PLEG): container finished" podID="f7842864-e743-436c-a829-9c3be434cb89" containerID="e2d09db72a9ac6ea1dc14a4748a04dde81d5858632c07376178fc18cee2565bc" exitCode=0
Dec 13 07:04:03 crc kubenswrapper[4644]: I1213 07:04:03.310524 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7842864-e743-436c-a829-9c3be434cb89","Type":"ContainerDied","Data":"96568e3bb0ac20eff94edcdda1181f06bb5b82722b52ca6642af2d5f51e7751d"}
Dec 13 07:04:03 crc kubenswrapper[4644]: I1213 07:04:03.310564 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7842864-e743-436c-a829-9c3be434cb89","Type":"ContainerDied","Data":"e7c4eb53540d35ad4847284d07fe75530647bac800eb776bb2866742d2f13b64"}
Dec 13 07:04:03 crc kubenswrapper[4644]: I1213 07:04:03.310576 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7842864-e743-436c-a829-9c3be434cb89","Type":"ContainerDied","Data":"e2d09db72a9ac6ea1dc14a4748a04dde81d5858632c07376178fc18cee2565bc"}
Dec 13 07:04:03 crc kubenswrapper[4644]: I1213 07:04:03.312426 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6" event={"ID":"40dff749-3ac6-4c02-b892-e0d70c4e267f","Type":"ContainerStarted","Data":"33e65c6df8f764c759d4ba20cc968eba42e6df4c6613abcbfc424ae07f38904e"}
Dec 13 07:04:03 crc kubenswrapper[4644]: I1213 07:04:03.312549 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6"
Dec 13 07:04:03 crc kubenswrapper[4644]: I1213 07:04:03.312697 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4f66fa31-b8e2-405d-8c55-75aee839481c" containerName="nova-api-log" containerID="cri-o://6da10d6334014ce1e9d1372b49a3317d2d064152c46d6b5b18ad9751d887d2f7" gracePeriod=30
Dec 13 07:04:03 crc kubenswrapper[4644]: I1213 07:04:03.312739 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4f66fa31-b8e2-405d-8c55-75aee839481c" containerName="nova-api-api" containerID="cri-o://bd6c689d21449964e13836ded5d77b6a24598fcdd7beb5e720e3bb25a0fa3380" gracePeriod=30
Dec 13 07:04:03 crc kubenswrapper[4644]: I1213 07:04:03.336006 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6" podStartSLOduration=3.335986564 podStartE2EDuration="3.335986564s" podCreationTimestamp="2025-12-13 07:04:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:04:03.330678708 +0000 UTC m=+1105.545629541" watchObservedRunningTime="2025-12-13 07:04:03.335986564 +0000 UTC m=+1105.550937397"
Dec 13 07:04:03 crc kubenswrapper[4644]: I1213 07:04:03.623793 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Dec 13 07:04:04 crc kubenswrapper[4644]: I1213 07:04:04.323684 4644 generic.go:334] "Generic (PLEG): container finished"
podID="4f66fa31-b8e2-405d-8c55-75aee839481c" containerID="6da10d6334014ce1e9d1372b49a3317d2d064152c46d6b5b18ad9751d887d2f7" exitCode=143 Dec 13 07:04:04 crc kubenswrapper[4644]: I1213 07:04:04.323764 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4f66fa31-b8e2-405d-8c55-75aee839481c","Type":"ContainerDied","Data":"6da10d6334014ce1e9d1372b49a3317d2d064152c46d6b5b18ad9751d887d2f7"} Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.742890 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.820917 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-sg-core-conf-yaml\") pod \"f7842864-e743-436c-a829-9c3be434cb89\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.821422 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flqp6\" (UniqueName: \"kubernetes.io/projected/f7842864-e743-436c-a829-9c3be434cb89-kube-api-access-flqp6\") pod \"f7842864-e743-436c-a829-9c3be434cb89\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.821634 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-scripts\") pod \"f7842864-e743-436c-a829-9c3be434cb89\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.821777 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-config-data\") pod \"f7842864-e743-436c-a829-9c3be434cb89\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.821880 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-ceilometer-tls-certs\") pod \"f7842864-e743-436c-a829-9c3be434cb89\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.821959 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-combined-ca-bundle\") pod \"f7842864-e743-436c-a829-9c3be434cb89\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.822060 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7842864-e743-436c-a829-9c3be434cb89-run-httpd\") pod \"f7842864-e743-436c-a829-9c3be434cb89\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.822256 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7842864-e743-436c-a829-9c3be434cb89-log-httpd\") pod \"f7842864-e743-436c-a829-9c3be434cb89\" (UID: \"f7842864-e743-436c-a829-9c3be434cb89\") " Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.822431 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/empty-dir/f7842864-e743-436c-a829-9c3be434cb89-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f7842864-e743-436c-a829-9c3be434cb89" (UID: "f7842864-e743-436c-a829-9c3be434cb89"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.823317 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7842864-e743-436c-a829-9c3be434cb89-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f7842864-e743-436c-a829-9c3be434cb89" (UID: "f7842864-e743-436c-a829-9c3be434cb89"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.824298 4644 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7842864-e743-436c-a829-9c3be434cb89-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.824363 4644 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f7842864-e743-436c-a829-9c3be434cb89-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.828989 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7842864-e743-436c-a829-9c3be434cb89-kube-api-access-flqp6" (OuterVolumeSpecName: "kube-api-access-flqp6") pod "f7842864-e743-436c-a829-9c3be434cb89" (UID: "f7842864-e743-436c-a829-9c3be434cb89"). InnerVolumeSpecName "kube-api-access-flqp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.829005 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-scripts" (OuterVolumeSpecName: "scripts") pod "f7842864-e743-436c-a829-9c3be434cb89" (UID: "f7842864-e743-436c-a829-9c3be434cb89"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.840771 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.848360 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f7842864-e743-436c-a829-9c3be434cb89" (UID: "f7842864-e743-436c-a829-9c3be434cb89"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.878290 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "f7842864-e743-436c-a829-9c3be434cb89" (UID: "f7842864-e743-436c-a829-9c3be434cb89"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.895506 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7842864-e743-436c-a829-9c3be434cb89" (UID: "f7842864-e743-436c-a829-9c3be434cb89"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.926735 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f66fa31-b8e2-405d-8c55-75aee839481c-config-data\") pod \"4f66fa31-b8e2-405d-8c55-75aee839481c\" (UID: \"4f66fa31-b8e2-405d-8c55-75aee839481c\") " Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.926785 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f66fa31-b8e2-405d-8c55-75aee839481c-logs\") pod \"4f66fa31-b8e2-405d-8c55-75aee839481c\" (UID: \"4f66fa31-b8e2-405d-8c55-75aee839481c\") " Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.926923 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f66fa31-b8e2-405d-8c55-75aee839481c-combined-ca-bundle\") pod \"4f66fa31-b8e2-405d-8c55-75aee839481c\" (UID: \"4f66fa31-b8e2-405d-8c55-75aee839481c\") " Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.927068 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vjth\" (UniqueName: \"kubernetes.io/projected/4f66fa31-b8e2-405d-8c55-75aee839481c-kube-api-access-7vjth\") pod \"4f66fa31-b8e2-405d-8c55-75aee839481c\" (UID: \"4f66fa31-b8e2-405d-8c55-75aee839481c\") " Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.927929 4644 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.927943 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flqp6\" (UniqueName: \"kubernetes.io/projected/f7842864-e743-436c-a829-9c3be434cb89-kube-api-access-flqp6\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.927955 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.927966 4644 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.927974 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.928700 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f66fa31-b8e2-405d-8c55-75aee839481c-logs" (OuterVolumeSpecName: "logs") pod "4f66fa31-b8e2-405d-8c55-75aee839481c" (UID: "4f66fa31-b8e2-405d-8c55-75aee839481c"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.934575 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f66fa31-b8e2-405d-8c55-75aee839481c-kube-api-access-7vjth" (OuterVolumeSpecName: "kube-api-access-7vjth") pod "4f66fa31-b8e2-405d-8c55-75aee839481c" (UID: "4f66fa31-b8e2-405d-8c55-75aee839481c"). InnerVolumeSpecName "kube-api-access-7vjth". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.934676 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-config-data" (OuterVolumeSpecName: "config-data") pod "f7842864-e743-436c-a829-9c3be434cb89" (UID: "f7842864-e743-436c-a829-9c3be434cb89"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.954810 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f66fa31-b8e2-405d-8c55-75aee839481c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f66fa31-b8e2-405d-8c55-75aee839481c" (UID: "4f66fa31-b8e2-405d-8c55-75aee839481c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:06 crc kubenswrapper[4644]: I1213 07:04:06.957746 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f66fa31-b8e2-405d-8c55-75aee839481c-config-data" (OuterVolumeSpecName: "config-data") pod "4f66fa31-b8e2-405d-8c55-75aee839481c" (UID: "4f66fa31-b8e2-405d-8c55-75aee839481c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.030646 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vjth\" (UniqueName: \"kubernetes.io/projected/4f66fa31-b8e2-405d-8c55-75aee839481c-kube-api-access-7vjth\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.030683 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f66fa31-b8e2-405d-8c55-75aee839481c-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.030695 4644 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f66fa31-b8e2-405d-8c55-75aee839481c-logs\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.030704 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7842864-e743-436c-a829-9c3be434cb89-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.030713 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f66fa31-b8e2-405d-8c55-75aee839481c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.349778 4644 generic.go:334] "Generic (PLEG): container finished" podID="f7842864-e743-436c-a829-9c3be434cb89" containerID="bad98fad7253b63abb5a9336a7c07a4f4a15f25ab8d981fb9bec1cc5203dc650" exitCode=0 Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.349866 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"f7842864-e743-436c-a829-9c3be434cb89","Type":"ContainerDied","Data":"bad98fad7253b63abb5a9336a7c07a4f4a15f25ab8d981fb9bec1cc5203dc650"} Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.349888 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.349909 4644 scope.go:117] "RemoveContainer" containerID="96568e3bb0ac20eff94edcdda1181f06bb5b82722b52ca6642af2d5f51e7751d" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.349897 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f7842864-e743-436c-a829-9c3be434cb89","Type":"ContainerDied","Data":"e3ebc03f35d679daf4a71301c3910c0c6675f0811e2c1aa595c9db63d833aef3"} Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.352240 4644 generic.go:334] "Generic (PLEG): container finished" podID="4f66fa31-b8e2-405d-8c55-75aee839481c" containerID="bd6c689d21449964e13836ded5d77b6a24598fcdd7beb5e720e3bb25a0fa3380" exitCode=0 Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.352267 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4f66fa31-b8e2-405d-8c55-75aee839481c","Type":"ContainerDied","Data":"bd6c689d21449964e13836ded5d77b6a24598fcdd7beb5e720e3bb25a0fa3380"} Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.352288 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4f66fa31-b8e2-405d-8c55-75aee839481c","Type":"ContainerDied","Data":"1a5d2c0515a2a32533c97469f0df96da2809e83b6b444a76f82e84a04ef0ee47"} Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.352352 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.369133 4644 scope.go:117] "RemoveContainer" containerID="e7c4eb53540d35ad4847284d07fe75530647bac800eb776bb2866742d2f13b64" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.395595 4644 scope.go:117] "RemoveContainer" containerID="bad98fad7253b63abb5a9336a7c07a4f4a15f25ab8d981fb9bec1cc5203dc650" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.427675 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.447367 4644 scope.go:117] "RemoveContainer" containerID="e2d09db72a9ac6ea1dc14a4748a04dde81d5858632c07376178fc18cee2565bc" Dec 13 07:04:07 crc kubenswrapper[4644]: E1213 07:04:07.459579 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f66fa31_b8e2_405d_8c55_75aee839481c.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf7842864_e743_436c_a829_9c3be434cb89.slice\": RecentStats: unable to find data in memory cache]" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.468746 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.493594 4644 scope.go:117] "RemoveContainer" containerID="96568e3bb0ac20eff94edcdda1181f06bb5b82722b52ca6642af2d5f51e7751d" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.510093 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 13 07:04:07 crc kubenswrapper[4644]: E1213 07:04:07.511094 4644 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96568e3bb0ac20eff94edcdda1181f06bb5b82722b52ca6642af2d5f51e7751d\": container with ID starting with 96568e3bb0ac20eff94edcdda1181f06bb5b82722b52ca6642af2d5f51e7751d not found: ID does not exist" containerID="96568e3bb0ac20eff94edcdda1181f06bb5b82722b52ca6642af2d5f51e7751d" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.511143 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96568e3bb0ac20eff94edcdda1181f06bb5b82722b52ca6642af2d5f51e7751d"} err="failed to get container status \"96568e3bb0ac20eff94edcdda1181f06bb5b82722b52ca6642af2d5f51e7751d\": rpc error: code = NotFound desc = could not find container \"96568e3bb0ac20eff94edcdda1181f06bb5b82722b52ca6642af2d5f51e7751d\": container with ID starting with 96568e3bb0ac20eff94edcdda1181f06bb5b82722b52ca6642af2d5f51e7751d not found: ID does not exist" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.511171 4644 scope.go:117] "RemoveContainer" containerID="e7c4eb53540d35ad4847284d07fe75530647bac800eb776bb2866742d2f13b64" Dec 13 07:04:07 crc kubenswrapper[4644]: E1213 07:04:07.511922 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7c4eb53540d35ad4847284d07fe75530647bac800eb776bb2866742d2f13b64\": container with ID starting with e7c4eb53540d35ad4847284d07fe75530647bac800eb776bb2866742d2f13b64 not found: ID does not exist" containerID="e7c4eb53540d35ad4847284d07fe75530647bac800eb776bb2866742d2f13b64" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.511955 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7c4eb53540d35ad4847284d07fe75530647bac800eb776bb2866742d2f13b64"} err="failed to get container status \"e7c4eb53540d35ad4847284d07fe75530647bac800eb776bb2866742d2f13b64\": rpc error: code = NotFound desc = could not find container \"e7c4eb53540d35ad4847284d07fe75530647bac800eb776bb2866742d2f13b64\": container with ID starting with e7c4eb53540d35ad4847284d07fe75530647bac800eb776bb2866742d2f13b64 not found: ID does not exist" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.511983 4644 scope.go:117] "RemoveContainer" containerID="bad98fad7253b63abb5a9336a7c07a4f4a15f25ab8d981fb9bec1cc5203dc650" Dec 13 07:04:07 crc kubenswrapper[4644]: E1213 07:04:07.518568 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bad98fad7253b63abb5a9336a7c07a4f4a15f25ab8d981fb9bec1cc5203dc650\": container with ID starting with bad98fad7253b63abb5a9336a7c07a4f4a15f25ab8d981fb9bec1cc5203dc650 not found: ID does not exist" containerID="bad98fad7253b63abb5a9336a7c07a4f4a15f25ab8d981fb9bec1cc5203dc650" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.518601 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bad98fad7253b63abb5a9336a7c07a4f4a15f25ab8d981fb9bec1cc5203dc650"} err="failed to get container status \"bad98fad7253b63abb5a9336a7c07a4f4a15f25ab8d981fb9bec1cc5203dc650\": rpc error: code = NotFound desc = could not find container \"bad98fad7253b63abb5a9336a7c07a4f4a15f25ab8d981fb9bec1cc5203dc650\": container with ID starting with bad98fad7253b63abb5a9336a7c07a4f4a15f25ab8d981fb9bec1cc5203dc650 not found: ID does not exist" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.518624 4644 scope.go:117] "RemoveContainer" 
containerID="e2d09db72a9ac6ea1dc14a4748a04dde81d5858632c07376178fc18cee2565bc" Dec 13 07:04:07 crc kubenswrapper[4644]: E1213 07:04:07.523532 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2d09db72a9ac6ea1dc14a4748a04dde81d5858632c07376178fc18cee2565bc\": container with ID starting with e2d09db72a9ac6ea1dc14a4748a04dde81d5858632c07376178fc18cee2565bc not found: ID does not exist" containerID="e2d09db72a9ac6ea1dc14a4748a04dde81d5858632c07376178fc18cee2565bc" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.523556 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2d09db72a9ac6ea1dc14a4748a04dde81d5858632c07376178fc18cee2565bc"} err="failed to get container status \"e2d09db72a9ac6ea1dc14a4748a04dde81d5858632c07376178fc18cee2565bc\": rpc error: code = NotFound desc = could not find container \"e2d09db72a9ac6ea1dc14a4748a04dde81d5858632c07376178fc18cee2565bc\": container with ID starting with e2d09db72a9ac6ea1dc14a4748a04dde81d5858632c07376178fc18cee2565bc not found: ID does not exist" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.523578 4644 scope.go:117] "RemoveContainer" containerID="bd6c689d21449964e13836ded5d77b6a24598fcdd7beb5e720e3bb25a0fa3380" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.530582 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.565612 4644 scope.go:117] "RemoveContainer" containerID="6da10d6334014ce1e9d1372b49a3317d2d064152c46d6b5b18ad9751d887d2f7" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.568506 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:04:07 crc kubenswrapper[4644]: E1213 07:04:07.569031 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7842864-e743-436c-a829-9c3be434cb89" containerName="proxy-httpd" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.569046 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7842864-e743-436c-a829-9c3be434cb89" containerName="proxy-httpd" Dec 13 07:04:07 crc kubenswrapper[4644]: E1213 07:04:07.569065 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7842864-e743-436c-a829-9c3be434cb89" containerName="ceilometer-central-agent" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.569072 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7842864-e743-436c-a829-9c3be434cb89" containerName="ceilometer-central-agent" Dec 13 07:04:07 crc kubenswrapper[4644]: E1213 07:04:07.569082 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f66fa31-b8e2-405d-8c55-75aee839481c" containerName="nova-api-log" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.569105 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f66fa31-b8e2-405d-8c55-75aee839481c" containerName="nova-api-log" Dec 13 07:04:07 crc kubenswrapper[4644]: E1213 07:04:07.569117 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7842864-e743-436c-a829-9c3be434cb89" containerName="ceilometer-notification-agent" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.569123 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7842864-e743-436c-a829-9c3be434cb89" containerName="ceilometer-notification-agent" Dec 13 07:04:07 crc kubenswrapper[4644]: E1213 07:04:07.569141 4644 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="4f66fa31-b8e2-405d-8c55-75aee839481c" containerName="nova-api-api" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.569146 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f66fa31-b8e2-405d-8c55-75aee839481c" containerName="nova-api-api" Dec 13 07:04:07 crc kubenswrapper[4644]: E1213 07:04:07.569161 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7842864-e743-436c-a829-9c3be434cb89" containerName="sg-core" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.569183 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7842864-e743-436c-a829-9c3be434cb89" containerName="sg-core" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.569397 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7842864-e743-436c-a829-9c3be434cb89" containerName="proxy-httpd" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.569424 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7842864-e743-436c-a829-9c3be434cb89" containerName="ceilometer-notification-agent" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.569433 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7842864-e743-436c-a829-9c3be434cb89" containerName="sg-core" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.569461 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f66fa31-b8e2-405d-8c55-75aee839481c" containerName="nova-api-log" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.569470 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f66fa31-b8e2-405d-8c55-75aee839481c" containerName="nova-api-api" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.569487 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7842864-e743-436c-a829-9c3be434cb89" containerName="ceilometer-central-agent" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.571640 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.575050 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.575105 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.578734 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.585499 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.593971 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.595208 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.597990 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.598160 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.599637 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.604968 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.626600 4644 scope.go:117] "RemoveContainer" containerID="bd6c689d21449964e13836ded5d77b6a24598fcdd7beb5e720e3bb25a0fa3380" Dec 13 07:04:07 crc kubenswrapper[4644]: E1213 07:04:07.629287 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd6c689d21449964e13836ded5d77b6a24598fcdd7beb5e720e3bb25a0fa3380\": container with ID starting with bd6c689d21449964e13836ded5d77b6a24598fcdd7beb5e720e3bb25a0fa3380 not found: ID does not exist" containerID="bd6c689d21449964e13836ded5d77b6a24598fcdd7beb5e720e3bb25a0fa3380" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.629319 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd6c689d21449964e13836ded5d77b6a24598fcdd7beb5e720e3bb25a0fa3380"} err="failed to get container status \"bd6c689d21449964e13836ded5d77b6a24598fcdd7beb5e720e3bb25a0fa3380\": rpc error: code = NotFound desc = could not find container \"bd6c689d21449964e13836ded5d77b6a24598fcdd7beb5e720e3bb25a0fa3380\": container with ID starting with bd6c689d21449964e13836ded5d77b6a24598fcdd7beb5e720e3bb25a0fa3380 not found: ID does not exist" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.629343 4644 scope.go:117] "RemoveContainer" containerID="6da10d6334014ce1e9d1372b49a3317d2d064152c46d6b5b18ad9751d887d2f7" Dec 13 07:04:07 crc kubenswrapper[4644]: E1213 07:04:07.630115 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6da10d6334014ce1e9d1372b49a3317d2d064152c46d6b5b18ad9751d887d2f7\": container with ID starting with 6da10d6334014ce1e9d1372b49a3317d2d064152c46d6b5b18ad9751d887d2f7 not found: ID does not exist" containerID="6da10d6334014ce1e9d1372b49a3317d2d064152c46d6b5b18ad9751d887d2f7" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.630171 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6da10d6334014ce1e9d1372b49a3317d2d064152c46d6b5b18ad9751d887d2f7"} err="failed to get container status \"6da10d6334014ce1e9d1372b49a3317d2d064152c46d6b5b18ad9751d887d2f7\": rpc error: code = NotFound desc = could not find container \"6da10d6334014ce1e9d1372b49a3317d2d064152c46d6b5b18ad9751d887d2f7\": container with ID starting with 6da10d6334014ce1e9d1372b49a3317d2d064152c46d6b5b18ad9751d887d2f7 not found: ID does not exist" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.658050 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-log-httpd\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc 
kubenswrapper[4644]: I1213 07:04:07.658126 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.658162 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btz42\" (UniqueName: \"kubernetes.io/projected/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-kube-api-access-btz42\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.658201 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-public-tls-certs\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.658223 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.658313 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-config-data\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.658333 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-config-data\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.658564 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-scripts\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.658693 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-run-httpd\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.658753 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.659084 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5d7t\" (UniqueName: 
\"kubernetes.io/projected/9d1b8c9d-b131-4021-893d-ff973a4218d5-kube-api-access-b5d7t\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.659110 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.659168 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.659226 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d1b8c9d-b131-4021-893d-ff973a4218d5-logs\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.761629 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btz42\" (UniqueName: \"kubernetes.io/projected/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-kube-api-access-btz42\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.761757 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-public-tls-certs\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.762761 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.762854 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-config-data\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.762881 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-config-data\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.762967 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-scripts\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.763065 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-run-httpd\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.763119 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.763423 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5d7t\" (UniqueName: \"kubernetes.io/projected/9d1b8c9d-b131-4021-893d-ff973a4218d5-kube-api-access-b5d7t\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.763480 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.763539 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.763580 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d1b8c9d-b131-4021-893d-ff973a4218d5-logs\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.763614 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-log-httpd\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.763632 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.764972 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-run-httpd\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.766989 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-config-data\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.767238 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/9d1b8c9d-b131-4021-893d-ff973a4218d5-logs\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.767487 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-log-httpd\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.768358 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-public-tls-certs\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.768700 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-config-data\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.770345 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-scripts\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.770769 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.770846 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.771204 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.772244 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.775163 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.778338 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btz42\" (UniqueName: \"kubernetes.io/projected/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-kube-api-access-btz42\") pod 
\"ceilometer-0\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.778743 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5d7t\" (UniqueName: \"kubernetes.io/projected/9d1b8c9d-b131-4021-893d-ff973a4218d5-kube-api-access-b5d7t\") pod \"nova-api-0\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " pod="openstack/nova-api-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.931131 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:04:07 crc kubenswrapper[4644]: I1213 07:04:07.940091 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 13 07:04:08 crc kubenswrapper[4644]: I1213 07:04:08.350258 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:04:08 crc kubenswrapper[4644]: I1213 07:04:08.370884 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be90da58-f6f5-4c6d-9f0a-1ddd56500d66","Type":"ContainerStarted","Data":"02d992f6979288bcd86acf9f15ee22b5390b6f84624744080a99b4c25f43efb1"} Dec 13 07:04:08 crc kubenswrapper[4644]: I1213 07:04:08.397076 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f66fa31-b8e2-405d-8c55-75aee839481c" path="/var/lib/kubelet/pods/4f66fa31-b8e2-405d-8c55-75aee839481c/volumes" Dec 13 07:04:08 crc kubenswrapper[4644]: I1213 07:04:08.397728 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7842864-e743-436c-a829-9c3be434cb89" path="/var/lib/kubelet/pods/f7842864-e743-436c-a829-9c3be434cb89/volumes" Dec 13 07:04:08 crc kubenswrapper[4644]: W1213 07:04:08.408870 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d1b8c9d_b131_4021_893d_ff973a4218d5.slice/crio-83fd6a6342f6e5902d080fdd5a4b723dc3fb62c69ca14ea682b08af5981c5cab WatchSource:0}: Error finding container 83fd6a6342f6e5902d080fdd5a4b723dc3fb62c69ca14ea682b08af5981c5cab: Status 404 returned error can't find the container with id 83fd6a6342f6e5902d080fdd5a4b723dc3fb62c69ca14ea682b08af5981c5cab Dec 13 07:04:08 crc kubenswrapper[4644]: I1213 07:04:08.413775 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 13 07:04:08 crc kubenswrapper[4644]: I1213 07:04:08.623035 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 13 07:04:08 crc kubenswrapper[4644]: I1213 07:04:08.638307 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.383569 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be90da58-f6f5-4c6d-9f0a-1ddd56500d66","Type":"ContainerStarted","Data":"6b28384b35a84f36ef44c38fb5854e57d4286eb11c3508fb01aa72f6006fb757"} Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.385412 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9d1b8c9d-b131-4021-893d-ff973a4218d5","Type":"ContainerStarted","Data":"f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33"} Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.385459 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"9d1b8c9d-b131-4021-893d-ff973a4218d5","Type":"ContainerStarted","Data":"cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394"} Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.385474 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9d1b8c9d-b131-4021-893d-ff973a4218d5","Type":"ContainerStarted","Data":"83fd6a6342f6e5902d080fdd5a4b723dc3fb62c69ca14ea682b08af5981c5cab"} Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.400984 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.426602 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.426581507 podStartE2EDuration="2.426581507s" podCreationTimestamp="2025-12-13 07:04:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:04:09.407330715 +0000 UTC m=+1111.622281547" watchObservedRunningTime="2025-12-13 07:04:09.426581507 +0000 UTC m=+1111.641532340" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.523042 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-4h7xs"] Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.524462 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-4h7xs" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.526340 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.526608 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.533159 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-4h7xs"] Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.597988 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-scripts\") pod \"nova-cell1-cell-mapping-4h7xs\" (UID: \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\") " pod="openstack/nova-cell1-cell-mapping-4h7xs" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.598079 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-config-data\") pod \"nova-cell1-cell-mapping-4h7xs\" (UID: \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\") " pod="openstack/nova-cell1-cell-mapping-4h7xs" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.598164 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-4h7xs\" (UID: \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\") " pod="openstack/nova-cell1-cell-mapping-4h7xs" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.598398 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kgh2\" (UniqueName: \"kubernetes.io/projected/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-kube-api-access-8kgh2\") pod 
\"nova-cell1-cell-mapping-4h7xs\" (UID: \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\") " pod="openstack/nova-cell1-cell-mapping-4h7xs" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.699771 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-scripts\") pod \"nova-cell1-cell-mapping-4h7xs\" (UID: \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\") " pod="openstack/nova-cell1-cell-mapping-4h7xs" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.699844 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-config-data\") pod \"nova-cell1-cell-mapping-4h7xs\" (UID: \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\") " pod="openstack/nova-cell1-cell-mapping-4h7xs" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.699901 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-4h7xs\" (UID: \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\") " pod="openstack/nova-cell1-cell-mapping-4h7xs" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.700058 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kgh2\" (UniqueName: \"kubernetes.io/projected/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-kube-api-access-8kgh2\") pod \"nova-cell1-cell-mapping-4h7xs\" (UID: \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\") " pod="openstack/nova-cell1-cell-mapping-4h7xs" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.704499 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-4h7xs\" (UID: \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\") " pod="openstack/nova-cell1-cell-mapping-4h7xs" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.705537 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-config-data\") pod \"nova-cell1-cell-mapping-4h7xs\" (UID: \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\") " pod="openstack/nova-cell1-cell-mapping-4h7xs" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.712842 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-scripts\") pod \"nova-cell1-cell-mapping-4h7xs\" (UID: \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\") " pod="openstack/nova-cell1-cell-mapping-4h7xs" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.714235 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kgh2\" (UniqueName: \"kubernetes.io/projected/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-kube-api-access-8kgh2\") pod \"nova-cell1-cell-mapping-4h7xs\" (UID: \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\") " pod="openstack/nova-cell1-cell-mapping-4h7xs" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.753924 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.753982 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:04:09 crc kubenswrapper[4644]: I1213 07:04:09.854784 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-4h7xs" Dec 13 07:04:10 crc kubenswrapper[4644]: I1213 07:04:10.245751 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-4h7xs"] Dec 13 07:04:10 crc kubenswrapper[4644]: W1213 07:04:10.246775 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf3e0cf20_65fd_4d5e_b1ae_a8c51f29b440.slice/crio-ed3f9a4f7eb90c84df0a54ad7275dd781059a321286c7ed5de287fa6a960241b WatchSource:0}: Error finding container ed3f9a4f7eb90c84df0a54ad7275dd781059a321286c7ed5de287fa6a960241b: Status 404 returned error can't find the container with id ed3f9a4f7eb90c84df0a54ad7275dd781059a321286c7ed5de287fa6a960241b Dec 13 07:04:10 crc kubenswrapper[4644]: I1213 07:04:10.398747 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-4h7xs" event={"ID":"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440","Type":"ContainerStarted","Data":"d3474a3cb232f3aa7fc1ca89462ddac691abf6f598caeda5f214b774725cd68c"} Dec 13 07:04:10 crc kubenswrapper[4644]: I1213 07:04:10.398781 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-4h7xs" event={"ID":"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440","Type":"ContainerStarted","Data":"ed3f9a4f7eb90c84df0a54ad7275dd781059a321286c7ed5de287fa6a960241b"} Dec 13 07:04:10 crc kubenswrapper[4644]: I1213 07:04:10.399389 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be90da58-f6f5-4c6d-9f0a-1ddd56500d66","Type":"ContainerStarted","Data":"28ce5e978887acf34be2d4cacbc886c7a64466edfd473c97f732a1bb15811c53"} Dec 13 07:04:10 crc kubenswrapper[4644]: I1213 07:04:10.415017 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-4h7xs" podStartSLOduration=1.41500469 podStartE2EDuration="1.41500469s" podCreationTimestamp="2025-12-13 07:04:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:04:10.411698827 +0000 UTC m=+1112.626649660" watchObservedRunningTime="2025-12-13 07:04:10.41500469 +0000 UTC m=+1112.629955523" Dec 13 07:04:10 crc kubenswrapper[4644]: I1213 07:04:10.769602 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6" Dec 13 07:04:10 crc kubenswrapper[4644]: I1213 07:04:10.831093 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8d97cbc7-mf4xx"] Dec 13 07:04:10 crc kubenswrapper[4644]: I1213 07:04:10.831332 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx" podUID="24878fb4-24ad-4fa3-ad74-edf2b9b64e3c" containerName="dnsmasq-dns" containerID="cri-o://03eecbc4efe35048316411a12731d8b644c9395a41db9192287ba1674bdc81e1" gracePeriod=10 Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 
07:04:11.204370 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx" Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.356433 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-dns-svc\") pod \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.356509 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlcpm\" (UniqueName: \"kubernetes.io/projected/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-kube-api-access-vlcpm\") pod \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.356531 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-config\") pod \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.356597 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-ovsdbserver-nb\") pod \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.356645 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-ovsdbserver-sb\") pod \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\" (UID: \"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c\") " Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.361580 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-kube-api-access-vlcpm" (OuterVolumeSpecName: "kube-api-access-vlcpm") pod "24878fb4-24ad-4fa3-ad74-edf2b9b64e3c" (UID: "24878fb4-24ad-4fa3-ad74-edf2b9b64e3c"). InnerVolumeSpecName "kube-api-access-vlcpm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.392152 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "24878fb4-24ad-4fa3-ad74-edf2b9b64e3c" (UID: "24878fb4-24ad-4fa3-ad74-edf2b9b64e3c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.397507 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "24878fb4-24ad-4fa3-ad74-edf2b9b64e3c" (UID: "24878fb4-24ad-4fa3-ad74-edf2b9b64e3c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.398537 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-config" (OuterVolumeSpecName: "config") pod "24878fb4-24ad-4fa3-ad74-edf2b9b64e3c" (UID: "24878fb4-24ad-4fa3-ad74-edf2b9b64e3c"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.400516 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "24878fb4-24ad-4fa3-ad74-edf2b9b64e3c" (UID: "24878fb4-24ad-4fa3-ad74-edf2b9b64e3c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.410199 4644 generic.go:334] "Generic (PLEG): container finished" podID="24878fb4-24ad-4fa3-ad74-edf2b9b64e3c" containerID="03eecbc4efe35048316411a12731d8b644c9395a41db9192287ba1674bdc81e1" exitCode=0 Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.410319 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx" event={"ID":"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c","Type":"ContainerDied","Data":"03eecbc4efe35048316411a12731d8b644c9395a41db9192287ba1674bdc81e1"} Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.410349 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx" event={"ID":"24878fb4-24ad-4fa3-ad74-edf2b9b64e3c","Type":"ContainerDied","Data":"baa2d8a8001baed09f5e61908b626a4db4e1a4a6f7e015fa4f069de891924ba3"} Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.410380 4644 scope.go:117] "RemoveContainer" containerID="03eecbc4efe35048316411a12731d8b644c9395a41db9192287ba1674bdc81e1" Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.410562 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8d97cbc7-mf4xx" Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.416717 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be90da58-f6f5-4c6d-9f0a-1ddd56500d66","Type":"ContainerStarted","Data":"df1112b11e8df28beec36049f33498ab6b3d1993e8c2f119c1e032b7db27cc5f"} Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.427897 4644 scope.go:117] "RemoveContainer" containerID="f10664d9e74d1a420e0ed1793cd65d600b33e0f0b64df2c03cbf2a234d09b836" Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.445742 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8d97cbc7-mf4xx"] Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.458486 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8d97cbc7-mf4xx"] Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.463057 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.463082 4644 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.463096 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.463137 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlcpm\" (UniqueName: 
\"kubernetes.io/projected/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-kube-api-access-vlcpm\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.463165 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.466786 4644 scope.go:117] "RemoveContainer" containerID="03eecbc4efe35048316411a12731d8b644c9395a41db9192287ba1674bdc81e1" Dec 13 07:04:11 crc kubenswrapper[4644]: E1213 07:04:11.467238 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03eecbc4efe35048316411a12731d8b644c9395a41db9192287ba1674bdc81e1\": container with ID starting with 03eecbc4efe35048316411a12731d8b644c9395a41db9192287ba1674bdc81e1 not found: ID does not exist" containerID="03eecbc4efe35048316411a12731d8b644c9395a41db9192287ba1674bdc81e1" Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.467279 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03eecbc4efe35048316411a12731d8b644c9395a41db9192287ba1674bdc81e1"} err="failed to get container status \"03eecbc4efe35048316411a12731d8b644c9395a41db9192287ba1674bdc81e1\": rpc error: code = NotFound desc = could not find container \"03eecbc4efe35048316411a12731d8b644c9395a41db9192287ba1674bdc81e1\": container with ID starting with 03eecbc4efe35048316411a12731d8b644c9395a41db9192287ba1674bdc81e1 not found: ID does not exist" Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.467305 4644 scope.go:117] "RemoveContainer" containerID="f10664d9e74d1a420e0ed1793cd65d600b33e0f0b64df2c03cbf2a234d09b836" Dec 13 07:04:11 crc kubenswrapper[4644]: E1213 07:04:11.468102 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f10664d9e74d1a420e0ed1793cd65d600b33e0f0b64df2c03cbf2a234d09b836\": container with ID starting with f10664d9e74d1a420e0ed1793cd65d600b33e0f0b64df2c03cbf2a234d09b836 not found: ID does not exist" containerID="f10664d9e74d1a420e0ed1793cd65d600b33e0f0b64df2c03cbf2a234d09b836" Dec 13 07:04:11 crc kubenswrapper[4644]: I1213 07:04:11.468181 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f10664d9e74d1a420e0ed1793cd65d600b33e0f0b64df2c03cbf2a234d09b836"} err="failed to get container status \"f10664d9e74d1a420e0ed1793cd65d600b33e0f0b64df2c03cbf2a234d09b836\": rpc error: code = NotFound desc = could not find container \"f10664d9e74d1a420e0ed1793cd65d600b33e0f0b64df2c03cbf2a234d09b836\": container with ID starting with f10664d9e74d1a420e0ed1793cd65d600b33e0f0b64df2c03cbf2a234d09b836 not found: ID does not exist" Dec 13 07:04:12 crc kubenswrapper[4644]: I1213 07:04:12.399861 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24878fb4-24ad-4fa3-ad74-edf2b9b64e3c" path="/var/lib/kubelet/pods/24878fb4-24ad-4fa3-ad74-edf2b9b64e3c/volumes" Dec 13 07:04:13 crc kubenswrapper[4644]: I1213 07:04:13.435928 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be90da58-f6f5-4c6d-9f0a-1ddd56500d66","Type":"ContainerStarted","Data":"c2b239327faaa7cba97565ba270ded26adfb4e3686407790983e9f8fddc11507"} Dec 13 07:04:13 crc kubenswrapper[4644]: I1213 07:04:13.437000 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/ceilometer-0" Dec 13 07:04:13 crc kubenswrapper[4644]: I1213 07:04:13.457124 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.346255835 podStartE2EDuration="6.457108809s" podCreationTimestamp="2025-12-13 07:04:07 +0000 UTC" firstStartedPulling="2025-12-13 07:04:08.362967853 +0000 UTC m=+1110.577918687" lastFinishedPulling="2025-12-13 07:04:12.473820828 +0000 UTC m=+1114.688771661" observedRunningTime="2025-12-13 07:04:13.452072283 +0000 UTC m=+1115.667023116" watchObservedRunningTime="2025-12-13 07:04:13.457108809 +0000 UTC m=+1115.672059632" Dec 13 07:04:14 crc kubenswrapper[4644]: I1213 07:04:14.445346 4644 generic.go:334] "Generic (PLEG): container finished" podID="f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440" containerID="d3474a3cb232f3aa7fc1ca89462ddac691abf6f598caeda5f214b774725cd68c" exitCode=0 Dec 13 07:04:14 crc kubenswrapper[4644]: I1213 07:04:14.445392 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-4h7xs" event={"ID":"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440","Type":"ContainerDied","Data":"d3474a3cb232f3aa7fc1ca89462ddac691abf6f598caeda5f214b774725cd68c"} Dec 13 07:04:15 crc kubenswrapper[4644]: I1213 07:04:15.738017 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-4h7xs" Dec 13 07:04:15 crc kubenswrapper[4644]: I1213 07:04:15.852475 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-config-data\") pod \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\" (UID: \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\") " Dec 13 07:04:15 crc kubenswrapper[4644]: I1213 07:04:15.852551 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-combined-ca-bundle\") pod \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\" (UID: \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\") " Dec 13 07:04:15 crc kubenswrapper[4644]: I1213 07:04:15.852662 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8kgh2\" (UniqueName: \"kubernetes.io/projected/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-kube-api-access-8kgh2\") pod \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\" (UID: \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\") " Dec 13 07:04:15 crc kubenswrapper[4644]: I1213 07:04:15.852742 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-scripts\") pod \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\" (UID: \"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440\") " Dec 13 07:04:15 crc kubenswrapper[4644]: I1213 07:04:15.858592 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-kube-api-access-8kgh2" (OuterVolumeSpecName: "kube-api-access-8kgh2") pod "f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440" (UID: "f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440"). InnerVolumeSpecName "kube-api-access-8kgh2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:15 crc kubenswrapper[4644]: I1213 07:04:15.859041 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-scripts" (OuterVolumeSpecName: "scripts") pod "f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440" (UID: "f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:15 crc kubenswrapper[4644]: I1213 07:04:15.875542 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-config-data" (OuterVolumeSpecName: "config-data") pod "f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440" (UID: "f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:15 crc kubenswrapper[4644]: I1213 07:04:15.875603 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440" (UID: "f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:15 crc kubenswrapper[4644]: I1213 07:04:15.955613 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:15 crc kubenswrapper[4644]: I1213 07:04:15.955664 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:15 crc kubenswrapper[4644]: I1213 07:04:15.955674 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:15 crc kubenswrapper[4644]: I1213 07:04:15.955687 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8kgh2\" (UniqueName: \"kubernetes.io/projected/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440-kube-api-access-8kgh2\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:16 crc kubenswrapper[4644]: I1213 07:04:16.462569 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-4h7xs" event={"ID":"f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440","Type":"ContainerDied","Data":"ed3f9a4f7eb90c84df0a54ad7275dd781059a321286c7ed5de287fa6a960241b"} Dec 13 07:04:16 crc kubenswrapper[4644]: I1213 07:04:16.462850 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed3f9a4f7eb90c84df0a54ad7275dd781059a321286c7ed5de287fa6a960241b" Dec 13 07:04:16 crc kubenswrapper[4644]: I1213 07:04:16.462648 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-4h7xs" Dec 13 07:04:16 crc kubenswrapper[4644]: I1213 07:04:16.627412 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 13 07:04:16 crc kubenswrapper[4644]: I1213 07:04:16.627659 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9d1b8c9d-b131-4021-893d-ff973a4218d5" containerName="nova-api-log" containerID="cri-o://cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394" gracePeriod=30 Dec 13 07:04:16 crc kubenswrapper[4644]: I1213 07:04:16.627742 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9d1b8c9d-b131-4021-893d-ff973a4218d5" containerName="nova-api-api" containerID="cri-o://f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33" gracePeriod=30 Dec 13 07:04:16 crc kubenswrapper[4644]: I1213 07:04:16.641391 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 07:04:16 crc kubenswrapper[4644]: I1213 07:04:16.641630 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="9a9cd4cc-b556-46d6-a80c-39b4312abc2a" containerName="nova-scheduler-scheduler" containerID="cri-o://f30c46c28d08ef3780f6dddb1a5fe092cea7bfa6826c087e457ac2ccc06ee8d2" gracePeriod=30 Dec 13 07:04:16 crc kubenswrapper[4644]: I1213 07:04:16.654202 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 07:04:16 crc kubenswrapper[4644]: I1213 07:04:16.654526 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="996b5118-5c68-484e-84e4-520e4e895a4d" containerName="nova-metadata-log" containerID="cri-o://e184cd7401aa2a0a7a3725a7d59cf6bef4b45dfa1c20d220b327e6ad646415ea" gracePeriod=30 Dec 13 07:04:16 crc kubenswrapper[4644]: I1213 07:04:16.654731 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="996b5118-5c68-484e-84e4-520e4e895a4d" containerName="nova-metadata-metadata" containerID="cri-o://188fa5495797aaea18d0cb461e779a677bef6faabdae10a28c2eab1a8788e1b9" gracePeriod=30 Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.209995 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.281403 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d1b8c9d-b131-4021-893d-ff973a4218d5-logs\") pod \"9d1b8c9d-b131-4021-893d-ff973a4218d5\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.281590 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-internal-tls-certs\") pod \"9d1b8c9d-b131-4021-893d-ff973a4218d5\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.281642 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-combined-ca-bundle\") pod \"9d1b8c9d-b131-4021-893d-ff973a4218d5\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.281681 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5d7t\" (UniqueName: \"kubernetes.io/projected/9d1b8c9d-b131-4021-893d-ff973a4218d5-kube-api-access-b5d7t\") pod \"9d1b8c9d-b131-4021-893d-ff973a4218d5\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.281703 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-public-tls-certs\") pod \"9d1b8c9d-b131-4021-893d-ff973a4218d5\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.281729 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-config-data\") pod \"9d1b8c9d-b131-4021-893d-ff973a4218d5\" (UID: \"9d1b8c9d-b131-4021-893d-ff973a4218d5\") " Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.282584 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d1b8c9d-b131-4021-893d-ff973a4218d5-logs" (OuterVolumeSpecName: "logs") pod "9d1b8c9d-b131-4021-893d-ff973a4218d5" (UID: "9d1b8c9d-b131-4021-893d-ff973a4218d5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.287238 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d1b8c9d-b131-4021-893d-ff973a4218d5-kube-api-access-b5d7t" (OuterVolumeSpecName: "kube-api-access-b5d7t") pod "9d1b8c9d-b131-4021-893d-ff973a4218d5" (UID: "9d1b8c9d-b131-4021-893d-ff973a4218d5"). InnerVolumeSpecName "kube-api-access-b5d7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.305960 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-config-data" (OuterVolumeSpecName: "config-data") pod "9d1b8c9d-b131-4021-893d-ff973a4218d5" (UID: "9d1b8c9d-b131-4021-893d-ff973a4218d5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.308477 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d1b8c9d-b131-4021-893d-ff973a4218d5" (UID: "9d1b8c9d-b131-4021-893d-ff973a4218d5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.324307 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "9d1b8c9d-b131-4021-893d-ff973a4218d5" (UID: "9d1b8c9d-b131-4021-893d-ff973a4218d5"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.325220 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "9d1b8c9d-b131-4021-893d-ff973a4218d5" (UID: "9d1b8c9d-b131-4021-893d-ff973a4218d5"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.384495 4644 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d1b8c9d-b131-4021-893d-ff973a4218d5-logs\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.384537 4644 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.384548 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.384558 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5d7t\" (UniqueName: \"kubernetes.io/projected/9d1b8c9d-b131-4021-893d-ff973a4218d5-kube-api-access-b5d7t\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.384567 4644 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.384575 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d1b8c9d-b131-4021-893d-ff973a4218d5-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:17 crc kubenswrapper[4644]: E1213 07:04:17.388388 4644 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f30c46c28d08ef3780f6dddb1a5fe092cea7bfa6826c087e457ac2ccc06ee8d2" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 13 07:04:17 crc kubenswrapper[4644]: E1213 07:04:17.394130 4644 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command 
error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f30c46c28d08ef3780f6dddb1a5fe092cea7bfa6826c087e457ac2ccc06ee8d2" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 13 07:04:17 crc kubenswrapper[4644]: E1213 07:04:17.395408 4644 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f30c46c28d08ef3780f6dddb1a5fe092cea7bfa6826c087e457ac2ccc06ee8d2" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 13 07:04:17 crc kubenswrapper[4644]: E1213 07:04:17.395472 4644 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="9a9cd4cc-b556-46d6-a80c-39b4312abc2a" containerName="nova-scheduler-scheduler" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.472903 4644 generic.go:334] "Generic (PLEG): container finished" podID="9d1b8c9d-b131-4021-893d-ff973a4218d5" containerID="f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33" exitCode=0 Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.472933 4644 generic.go:334] "Generic (PLEG): container finished" podID="9d1b8c9d-b131-4021-893d-ff973a4218d5" containerID="cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394" exitCode=143 Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.472991 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9d1b8c9d-b131-4021-893d-ff973a4218d5","Type":"ContainerDied","Data":"f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33"} Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.473079 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9d1b8c9d-b131-4021-893d-ff973a4218d5","Type":"ContainerDied","Data":"cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394"} Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.473093 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9d1b8c9d-b131-4021-893d-ff973a4218d5","Type":"ContainerDied","Data":"83fd6a6342f6e5902d080fdd5a4b723dc3fb62c69ca14ea682b08af5981c5cab"} Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.473109 4644 scope.go:117] "RemoveContainer" containerID="f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.473252 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.476360 4644 generic.go:334] "Generic (PLEG): container finished" podID="996b5118-5c68-484e-84e4-520e4e895a4d" containerID="e184cd7401aa2a0a7a3725a7d59cf6bef4b45dfa1c20d220b327e6ad646415ea" exitCode=143 Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.476469 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"996b5118-5c68-484e-84e4-520e4e895a4d","Type":"ContainerDied","Data":"e184cd7401aa2a0a7a3725a7d59cf6bef4b45dfa1c20d220b327e6ad646415ea"} Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.495691 4644 scope.go:117] "RemoveContainer" containerID="cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.504886 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.515585 4644 scope.go:117] "RemoveContainer" containerID="f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33" Dec 13 07:04:17 crc kubenswrapper[4644]: E1213 07:04:17.515998 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33\": container with ID starting with f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33 not found: ID does not exist" containerID="f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.516033 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33"} err="failed to get container status \"f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33\": rpc error: code = NotFound desc = could not find container \"f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33\": container with ID starting with f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33 not found: ID does not exist" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.516059 4644 scope.go:117] "RemoveContainer" containerID="cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.516174 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 13 07:04:17 crc kubenswrapper[4644]: E1213 07:04:17.516345 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394\": container with ID starting with cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394 not found: ID does not exist" containerID="cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.516376 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394"} err="failed to get container status \"cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394\": rpc error: code = NotFound desc = could not find container \"cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394\": container with ID starting with cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394 not found: ID does not exist" 
Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.516398 4644 scope.go:117] "RemoveContainer" containerID="f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.516925 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33"} err="failed to get container status \"f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33\": rpc error: code = NotFound desc = could not find container \"f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33\": container with ID starting with f43efddc8d08728130d8e3342d8104e7be5541de9a939bdc019c8278d1481f33 not found: ID does not exist" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.516987 4644 scope.go:117] "RemoveContainer" containerID="cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.517465 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394"} err="failed to get container status \"cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394\": rpc error: code = NotFound desc = could not find container \"cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394\": container with ID starting with cb6a3cc3596b704dc1ee2c992df3a8c79992e744a862b86fafffafcaa0d3f394 not found: ID does not exist" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.530372 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 13 07:04:17 crc kubenswrapper[4644]: E1213 07:04:17.530808 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440" containerName="nova-manage" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.530837 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440" containerName="nova-manage" Dec 13 07:04:17 crc kubenswrapper[4644]: E1213 07:04:17.530857 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d1b8c9d-b131-4021-893d-ff973a4218d5" containerName="nova-api-api" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.530865 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d1b8c9d-b131-4021-893d-ff973a4218d5" containerName="nova-api-api" Dec 13 07:04:17 crc kubenswrapper[4644]: E1213 07:04:17.530885 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24878fb4-24ad-4fa3-ad74-edf2b9b64e3c" containerName="init" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.530891 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="24878fb4-24ad-4fa3-ad74-edf2b9b64e3c" containerName="init" Dec 13 07:04:17 crc kubenswrapper[4644]: E1213 07:04:17.530900 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24878fb4-24ad-4fa3-ad74-edf2b9b64e3c" containerName="dnsmasq-dns" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.530907 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="24878fb4-24ad-4fa3-ad74-edf2b9b64e3c" containerName="dnsmasq-dns" Dec 13 07:04:17 crc kubenswrapper[4644]: E1213 07:04:17.530924 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d1b8c9d-b131-4021-893d-ff973a4218d5" containerName="nova-api-log" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.530929 4644 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="9d1b8c9d-b131-4021-893d-ff973a4218d5" containerName="nova-api-log" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.531110 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="24878fb4-24ad-4fa3-ad74-edf2b9b64e3c" containerName="dnsmasq-dns" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.531120 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d1b8c9d-b131-4021-893d-ff973a4218d5" containerName="nova-api-api" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.531138 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d1b8c9d-b131-4021-893d-ff973a4218d5" containerName="nova-api-log" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.531155 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440" containerName="nova-manage" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.532147 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.534596 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.534885 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.534993 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.548693 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.689370 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-public-tls-certs\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.689459 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-logs\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.689480 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.690018 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-config-data\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.690109 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc 
kubenswrapper[4644]: I1213 07:04:17.690204 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtdgn\" (UniqueName: \"kubernetes.io/projected/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-kube-api-access-xtdgn\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.791903 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.791987 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtdgn\" (UniqueName: \"kubernetes.io/projected/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-kube-api-access-xtdgn\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.792084 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-public-tls-certs\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.792146 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.792170 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-logs\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.792376 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-config-data\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.793006 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-logs\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.796676 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-public-tls-certs\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.796709 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 
07:04:17.796838 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-config-data\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.798389 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.810659 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtdgn\" (UniqueName: \"kubernetes.io/projected/0b5d2de7-5524-41a6-a24c-8af21f5a01f0-kube-api-access-xtdgn\") pod \"nova-api-0\" (UID: \"0b5d2de7-5524-41a6-a24c-8af21f5a01f0\") " pod="openstack/nova-api-0" Dec 13 07:04:17 crc kubenswrapper[4644]: I1213 07:04:17.849061 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 13 07:04:18 crc kubenswrapper[4644]: I1213 07:04:18.278988 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 13 07:04:18 crc kubenswrapper[4644]: W1213 07:04:18.281604 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b5d2de7_5524_41a6_a24c_8af21f5a01f0.slice/crio-3c93925d7a2958bf9bceb25e473fe3d1719ec69f0006cb51694d7d867c528c80 WatchSource:0}: Error finding container 3c93925d7a2958bf9bceb25e473fe3d1719ec69f0006cb51694d7d867c528c80: Status 404 returned error can't find the container with id 3c93925d7a2958bf9bceb25e473fe3d1719ec69f0006cb51694d7d867c528c80 Dec 13 07:04:18 crc kubenswrapper[4644]: I1213 07:04:18.406747 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d1b8c9d-b131-4021-893d-ff973a4218d5" path="/var/lib/kubelet/pods/9d1b8c9d-b131-4021-893d-ff973a4218d5/volumes" Dec 13 07:04:18 crc kubenswrapper[4644]: I1213 07:04:18.486683 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0b5d2de7-5524-41a6-a24c-8af21f5a01f0","Type":"ContainerStarted","Data":"33663ae4a1638e81dc48b284f8cf679626308ba50496eb7f4fa720de95c9121e"} Dec 13 07:04:18 crc kubenswrapper[4644]: I1213 07:04:18.486727 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0b5d2de7-5524-41a6-a24c-8af21f5a01f0","Type":"ContainerStarted","Data":"3c93925d7a2958bf9bceb25e473fe3d1719ec69f0006cb51694d7d867c528c80"} Dec 13 07:04:19 crc kubenswrapper[4644]: I1213 07:04:19.495576 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0b5d2de7-5524-41a6-a24c-8af21f5a01f0","Type":"ContainerStarted","Data":"f178cf90e1d44fecaebd09a89aa8bef96e5fae6129560b537bef62dd3d2c9370"} Dec 13 07:04:19 crc kubenswrapper[4644]: I1213 07:04:19.510864 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.510839256 podStartE2EDuration="2.510839256s" podCreationTimestamp="2025-12-13 07:04:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:04:19.509852762 +0000 UTC m=+1121.724803594" watchObservedRunningTime="2025-12-13 07:04:19.510839256 +0000 UTC m=+1121.725790089" Dec 13 
07:04:19 crc kubenswrapper[4644]: I1213 07:04:19.797145 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="996b5118-5c68-484e-84e4-520e4e895a4d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.181:8775/\": read tcp 10.217.0.2:50658->10.217.0.181:8775: read: connection reset by peer" Dec 13 07:04:19 crc kubenswrapper[4644]: I1213 07:04:19.797147 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="996b5118-5c68-484e-84e4-520e4e895a4d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.181:8775/\": read tcp 10.217.0.2:50664->10.217.0.181:8775: read: connection reset by peer" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.149174 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.248314 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h75f2\" (UniqueName: \"kubernetes.io/projected/996b5118-5c68-484e-84e4-520e4e895a4d-kube-api-access-h75f2\") pod \"996b5118-5c68-484e-84e4-520e4e895a4d\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.248355 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-combined-ca-bundle\") pod \"996b5118-5c68-484e-84e4-520e4e895a4d\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.248472 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/996b5118-5c68-484e-84e4-520e4e895a4d-logs\") pod \"996b5118-5c68-484e-84e4-520e4e895a4d\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.248518 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-config-data\") pod \"996b5118-5c68-484e-84e4-520e4e895a4d\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.248547 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-nova-metadata-tls-certs\") pod \"996b5118-5c68-484e-84e4-520e4e895a4d\" (UID: \"996b5118-5c68-484e-84e4-520e4e895a4d\") " Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.250124 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/996b5118-5c68-484e-84e4-520e4e895a4d-logs" (OuterVolumeSpecName: "logs") pod "996b5118-5c68-484e-84e4-520e4e895a4d" (UID: "996b5118-5c68-484e-84e4-520e4e895a4d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.255362 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/996b5118-5c68-484e-84e4-520e4e895a4d-kube-api-access-h75f2" (OuterVolumeSpecName: "kube-api-access-h75f2") pod "996b5118-5c68-484e-84e4-520e4e895a4d" (UID: "996b5118-5c68-484e-84e4-520e4e895a4d"). InnerVolumeSpecName "kube-api-access-h75f2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.271091 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-config-data" (OuterVolumeSpecName: "config-data") pod "996b5118-5c68-484e-84e4-520e4e895a4d" (UID: "996b5118-5c68-484e-84e4-520e4e895a4d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.272016 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "996b5118-5c68-484e-84e4-520e4e895a4d" (UID: "996b5118-5c68-484e-84e4-520e4e895a4d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.284886 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "996b5118-5c68-484e-84e4-520e4e895a4d" (UID: "996b5118-5c68-484e-84e4-520e4e895a4d"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.358680 4644 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.358716 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h75f2\" (UniqueName: \"kubernetes.io/projected/996b5118-5c68-484e-84e4-520e4e895a4d-kube-api-access-h75f2\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.358726 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.358734 4644 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/996b5118-5c68-484e-84e4-520e4e895a4d-logs\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.358742 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/996b5118-5c68-484e-84e4-520e4e895a4d-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.504548 4644 generic.go:334] "Generic (PLEG): container finished" podID="996b5118-5c68-484e-84e4-520e4e895a4d" containerID="188fa5495797aaea18d0cb461e779a677bef6faabdae10a28c2eab1a8788e1b9" exitCode=0 Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.504631 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.504672 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"996b5118-5c68-484e-84e4-520e4e895a4d","Type":"ContainerDied","Data":"188fa5495797aaea18d0cb461e779a677bef6faabdae10a28c2eab1a8788e1b9"} Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.504717 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"996b5118-5c68-484e-84e4-520e4e895a4d","Type":"ContainerDied","Data":"fe384c1357bb5062ccef64f25cf13079209f610e159ae1732c48c3dbaa18921b"} Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.504737 4644 scope.go:117] "RemoveContainer" containerID="188fa5495797aaea18d0cb461e779a677bef6faabdae10a28c2eab1a8788e1b9" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.532087 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.532612 4644 scope.go:117] "RemoveContainer" containerID="e184cd7401aa2a0a7a3725a7d59cf6bef4b45dfa1c20d220b327e6ad646415ea" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.542214 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.548747 4644 scope.go:117] "RemoveContainer" containerID="188fa5495797aaea18d0cb461e779a677bef6faabdae10a28c2eab1a8788e1b9" Dec 13 07:04:20 crc kubenswrapper[4644]: E1213 07:04:20.549157 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"188fa5495797aaea18d0cb461e779a677bef6faabdae10a28c2eab1a8788e1b9\": container with ID starting with 188fa5495797aaea18d0cb461e779a677bef6faabdae10a28c2eab1a8788e1b9 not found: ID does not exist" containerID="188fa5495797aaea18d0cb461e779a677bef6faabdae10a28c2eab1a8788e1b9" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.549195 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"188fa5495797aaea18d0cb461e779a677bef6faabdae10a28c2eab1a8788e1b9"} err="failed to get container status \"188fa5495797aaea18d0cb461e779a677bef6faabdae10a28c2eab1a8788e1b9\": rpc error: code = NotFound desc = could not find container \"188fa5495797aaea18d0cb461e779a677bef6faabdae10a28c2eab1a8788e1b9\": container with ID starting with 188fa5495797aaea18d0cb461e779a677bef6faabdae10a28c2eab1a8788e1b9 not found: ID does not exist" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.549216 4644 scope.go:117] "RemoveContainer" containerID="e184cd7401aa2a0a7a3725a7d59cf6bef4b45dfa1c20d220b327e6ad646415ea" Dec 13 07:04:20 crc kubenswrapper[4644]: E1213 07:04:20.549481 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e184cd7401aa2a0a7a3725a7d59cf6bef4b45dfa1c20d220b327e6ad646415ea\": container with ID starting with e184cd7401aa2a0a7a3725a7d59cf6bef4b45dfa1c20d220b327e6ad646415ea not found: ID does not exist" containerID="e184cd7401aa2a0a7a3725a7d59cf6bef4b45dfa1c20d220b327e6ad646415ea" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.549504 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e184cd7401aa2a0a7a3725a7d59cf6bef4b45dfa1c20d220b327e6ad646415ea"} err="failed to get container status \"e184cd7401aa2a0a7a3725a7d59cf6bef4b45dfa1c20d220b327e6ad646415ea\": rpc error: code = 
NotFound desc = could not find container \"e184cd7401aa2a0a7a3725a7d59cf6bef4b45dfa1c20d220b327e6ad646415ea\": container with ID starting with e184cd7401aa2a0a7a3725a7d59cf6bef4b45dfa1c20d220b327e6ad646415ea not found: ID does not exist" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.560869 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 13 07:04:20 crc kubenswrapper[4644]: E1213 07:04:20.561240 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="996b5118-5c68-484e-84e4-520e4e895a4d" containerName="nova-metadata-metadata" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.561260 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="996b5118-5c68-484e-84e4-520e4e895a4d" containerName="nova-metadata-metadata" Dec 13 07:04:20 crc kubenswrapper[4644]: E1213 07:04:20.561287 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="996b5118-5c68-484e-84e4-520e4e895a4d" containerName="nova-metadata-log" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.561293 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="996b5118-5c68-484e-84e4-520e4e895a4d" containerName="nova-metadata-log" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.561481 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="996b5118-5c68-484e-84e4-520e4e895a4d" containerName="nova-metadata-metadata" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.561497 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="996b5118-5c68-484e-84e4-520e4e895a4d" containerName="nova-metadata-log" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.562339 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.565604 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.565853 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.578092 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.664497 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf31dcb5-e753-4f68-aca2-39815bdf203d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"bf31dcb5-e753-4f68-aca2-39815bdf203d\") " pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.664554 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf31dcb5-e753-4f68-aca2-39815bdf203d-config-data\") pod \"nova-metadata-0\" (UID: \"bf31dcb5-e753-4f68-aca2-39815bdf203d\") " pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.664663 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znb9n\" (UniqueName: \"kubernetes.io/projected/bf31dcb5-e753-4f68-aca2-39815bdf203d-kube-api-access-znb9n\") pod \"nova-metadata-0\" (UID: \"bf31dcb5-e753-4f68-aca2-39815bdf203d\") " pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.664723 4644 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf31dcb5-e753-4f68-aca2-39815bdf203d-logs\") pod \"nova-metadata-0\" (UID: \"bf31dcb5-e753-4f68-aca2-39815bdf203d\") " pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.664795 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf31dcb5-e753-4f68-aca2-39815bdf203d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"bf31dcb5-e753-4f68-aca2-39815bdf203d\") " pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.766271 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znb9n\" (UniqueName: \"kubernetes.io/projected/bf31dcb5-e753-4f68-aca2-39815bdf203d-kube-api-access-znb9n\") pod \"nova-metadata-0\" (UID: \"bf31dcb5-e753-4f68-aca2-39815bdf203d\") " pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.766366 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf31dcb5-e753-4f68-aca2-39815bdf203d-logs\") pod \"nova-metadata-0\" (UID: \"bf31dcb5-e753-4f68-aca2-39815bdf203d\") " pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.766506 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf31dcb5-e753-4f68-aca2-39815bdf203d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"bf31dcb5-e753-4f68-aca2-39815bdf203d\") " pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.766679 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf31dcb5-e753-4f68-aca2-39815bdf203d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"bf31dcb5-e753-4f68-aca2-39815bdf203d\") " pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.766715 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf31dcb5-e753-4f68-aca2-39815bdf203d-config-data\") pod \"nova-metadata-0\" (UID: \"bf31dcb5-e753-4f68-aca2-39815bdf203d\") " pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.766765 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf31dcb5-e753-4f68-aca2-39815bdf203d-logs\") pod \"nova-metadata-0\" (UID: \"bf31dcb5-e753-4f68-aca2-39815bdf203d\") " pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.770060 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf31dcb5-e753-4f68-aca2-39815bdf203d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"bf31dcb5-e753-4f68-aca2-39815bdf203d\") " pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.771118 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf31dcb5-e753-4f68-aca2-39815bdf203d-config-data\") pod \"nova-metadata-0\" (UID: \"bf31dcb5-e753-4f68-aca2-39815bdf203d\") " pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc 
kubenswrapper[4644]: I1213 07:04:20.772117 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf31dcb5-e753-4f68-aca2-39815bdf203d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"bf31dcb5-e753-4f68-aca2-39815bdf203d\") " pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.782997 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znb9n\" (UniqueName: \"kubernetes.io/projected/bf31dcb5-e753-4f68-aca2-39815bdf203d-kube-api-access-znb9n\") pod \"nova-metadata-0\" (UID: \"bf31dcb5-e753-4f68-aca2-39815bdf203d\") " pod="openstack/nova-metadata-0" Dec 13 07:04:20 crc kubenswrapper[4644]: I1213 07:04:20.877906 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.061729 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.173019 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-config-data\") pod \"9a9cd4cc-b556-46d6-a80c-39b4312abc2a\" (UID: \"9a9cd4cc-b556-46d6-a80c-39b4312abc2a\") " Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.173143 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9fp6\" (UniqueName: \"kubernetes.io/projected/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-kube-api-access-k9fp6\") pod \"9a9cd4cc-b556-46d6-a80c-39b4312abc2a\" (UID: \"9a9cd4cc-b556-46d6-a80c-39b4312abc2a\") " Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.173273 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-combined-ca-bundle\") pod \"9a9cd4cc-b556-46d6-a80c-39b4312abc2a\" (UID: \"9a9cd4cc-b556-46d6-a80c-39b4312abc2a\") " Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.178241 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-kube-api-access-k9fp6" (OuterVolumeSpecName: "kube-api-access-k9fp6") pod "9a9cd4cc-b556-46d6-a80c-39b4312abc2a" (UID: "9a9cd4cc-b556-46d6-a80c-39b4312abc2a"). InnerVolumeSpecName "kube-api-access-k9fp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.197788 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9a9cd4cc-b556-46d6-a80c-39b4312abc2a" (UID: "9a9cd4cc-b556-46d6-a80c-39b4312abc2a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.206823 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-config-data" (OuterVolumeSpecName: "config-data") pod "9a9cd4cc-b556-46d6-a80c-39b4312abc2a" (UID: "9a9cd4cc-b556-46d6-a80c-39b4312abc2a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.276251 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9fp6\" (UniqueName: \"kubernetes.io/projected/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-kube-api-access-k9fp6\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.276283 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.276292 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a9cd4cc-b556-46d6-a80c-39b4312abc2a-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.300887 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 13 07:04:21 crc kubenswrapper[4644]: W1213 07:04:21.305213 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf31dcb5_e753_4f68_aca2_39815bdf203d.slice/crio-f937171a8fb73470bfd84bfe9044ca8c026472dd23f6c464753f47eb080b2cae WatchSource:0}: Error finding container f937171a8fb73470bfd84bfe9044ca8c026472dd23f6c464753f47eb080b2cae: Status 404 returned error can't find the container with id f937171a8fb73470bfd84bfe9044ca8c026472dd23f6c464753f47eb080b2cae Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.515871 4644 generic.go:334] "Generic (PLEG): container finished" podID="9a9cd4cc-b556-46d6-a80c-39b4312abc2a" containerID="f30c46c28d08ef3780f6dddb1a5fe092cea7bfa6826c087e457ac2ccc06ee8d2" exitCode=0 Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.515945 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.515955 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9a9cd4cc-b556-46d6-a80c-39b4312abc2a","Type":"ContainerDied","Data":"f30c46c28d08ef3780f6dddb1a5fe092cea7bfa6826c087e457ac2ccc06ee8d2"} Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.516298 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9a9cd4cc-b556-46d6-a80c-39b4312abc2a","Type":"ContainerDied","Data":"a0edb6fa1afe5c8fa149537b7aacaac5adf06de94f61f4ee972cd8f21f6fe1f6"} Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.516322 4644 scope.go:117] "RemoveContainer" containerID="f30c46c28d08ef3780f6dddb1a5fe092cea7bfa6826c087e457ac2ccc06ee8d2" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.519228 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"bf31dcb5-e753-4f68-aca2-39815bdf203d","Type":"ContainerStarted","Data":"0b7fd6fa1e92b8b4a52a10931f9a515fcfbcc526be28cc176551cc4e93621dcc"} Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.519288 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"bf31dcb5-e753-4f68-aca2-39815bdf203d","Type":"ContainerStarted","Data":"f937171a8fb73470bfd84bfe9044ca8c026472dd23f6c464753f47eb080b2cae"} Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.541707 4644 scope.go:117] "RemoveContainer" containerID="f30c46c28d08ef3780f6dddb1a5fe092cea7bfa6826c087e457ac2ccc06ee8d2" Dec 13 07:04:21 crc kubenswrapper[4644]: E1213 07:04:21.543996 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f30c46c28d08ef3780f6dddb1a5fe092cea7bfa6826c087e457ac2ccc06ee8d2\": container with ID starting with f30c46c28d08ef3780f6dddb1a5fe092cea7bfa6826c087e457ac2ccc06ee8d2 not found: ID does not exist" containerID="f30c46c28d08ef3780f6dddb1a5fe092cea7bfa6826c087e457ac2ccc06ee8d2" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.544039 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f30c46c28d08ef3780f6dddb1a5fe092cea7bfa6826c087e457ac2ccc06ee8d2"} err="failed to get container status \"f30c46c28d08ef3780f6dddb1a5fe092cea7bfa6826c087e457ac2ccc06ee8d2\": rpc error: code = NotFound desc = could not find container \"f30c46c28d08ef3780f6dddb1a5fe092cea7bfa6826c087e457ac2ccc06ee8d2\": container with ID starting with f30c46c28d08ef3780f6dddb1a5fe092cea7bfa6826c087e457ac2ccc06ee8d2 not found: ID does not exist" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.547199 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.554364 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.562368 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 07:04:21 crc kubenswrapper[4644]: E1213 07:04:21.562770 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a9cd4cc-b556-46d6-a80c-39b4312abc2a" containerName="nova-scheduler-scheduler" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.562788 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a9cd4cc-b556-46d6-a80c-39b4312abc2a" containerName="nova-scheduler-scheduler" Dec 13 
07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.563000 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a9cd4cc-b556-46d6-a80c-39b4312abc2a" containerName="nova-scheduler-scheduler" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.563602 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.566726 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.571811 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.682501 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swjfc\" (UniqueName: \"kubernetes.io/projected/3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c-kube-api-access-swjfc\") pod \"nova-scheduler-0\" (UID: \"3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c\") " pod="openstack/nova-scheduler-0" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.682614 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c-config-data\") pod \"nova-scheduler-0\" (UID: \"3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c\") " pod="openstack/nova-scheduler-0" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.682724 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c\") " pod="openstack/nova-scheduler-0" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.784922 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c-config-data\") pod \"nova-scheduler-0\" (UID: \"3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c\") " pod="openstack/nova-scheduler-0" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.785054 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c\") " pod="openstack/nova-scheduler-0" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.785220 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swjfc\" (UniqueName: \"kubernetes.io/projected/3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c-kube-api-access-swjfc\") pod \"nova-scheduler-0\" (UID: \"3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c\") " pod="openstack/nova-scheduler-0" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.789047 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c\") " pod="openstack/nova-scheduler-0" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.789124 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c-config-data\") pod \"nova-scheduler-0\" (UID: \"3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c\") " pod="openstack/nova-scheduler-0" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.800399 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swjfc\" (UniqueName: \"kubernetes.io/projected/3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c-kube-api-access-swjfc\") pod \"nova-scheduler-0\" (UID: \"3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c\") " pod="openstack/nova-scheduler-0" Dec 13 07:04:21 crc kubenswrapper[4644]: I1213 07:04:21.878205 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 13 07:04:22 crc kubenswrapper[4644]: I1213 07:04:22.265164 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 13 07:04:22 crc kubenswrapper[4644]: W1213 07:04:22.268733 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3eec27e2_843c_4cb9_bf6a_4c4c01cbec9c.slice/crio-1d5a091abf4d86029089c560cf5ac3016aa2979b2d59b7ddbefdd28b4f53ec35 WatchSource:0}: Error finding container 1d5a091abf4d86029089c560cf5ac3016aa2979b2d59b7ddbefdd28b4f53ec35: Status 404 returned error can't find the container with id 1d5a091abf4d86029089c560cf5ac3016aa2979b2d59b7ddbefdd28b4f53ec35 Dec 13 07:04:22 crc kubenswrapper[4644]: I1213 07:04:22.397782 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="996b5118-5c68-484e-84e4-520e4e895a4d" path="/var/lib/kubelet/pods/996b5118-5c68-484e-84e4-520e4e895a4d/volumes" Dec 13 07:04:22 crc kubenswrapper[4644]: I1213 07:04:22.398500 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a9cd4cc-b556-46d6-a80c-39b4312abc2a" path="/var/lib/kubelet/pods/9a9cd4cc-b556-46d6-a80c-39b4312abc2a/volumes" Dec 13 07:04:22 crc kubenswrapper[4644]: I1213 07:04:22.531840 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"bf31dcb5-e753-4f68-aca2-39815bdf203d","Type":"ContainerStarted","Data":"49e978b04178f81b2d0a4ad74f2bd40c5f8064454092a30489d3993a710bd4c9"} Dec 13 07:04:22 crc kubenswrapper[4644]: I1213 07:04:22.534128 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c","Type":"ContainerStarted","Data":"37d0d5a45413ec47bb58e29ffee0496c3d545ad75da9ea7635418cbb157e34b8"} Dec 13 07:04:22 crc kubenswrapper[4644]: I1213 07:04:22.534173 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c","Type":"ContainerStarted","Data":"1d5a091abf4d86029089c560cf5ac3016aa2979b2d59b7ddbefdd28b4f53ec35"} Dec 13 07:04:22 crc kubenswrapper[4644]: I1213 07:04:22.554686 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.554671463 podStartE2EDuration="2.554671463s" podCreationTimestamp="2025-12-13 07:04:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:04:22.547113769 +0000 UTC m=+1124.762064602" watchObservedRunningTime="2025-12-13 07:04:22.554671463 +0000 UTC m=+1124.769622296" Dec 13 07:04:22 crc kubenswrapper[4644]: I1213 07:04:22.562563 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" 
podStartSLOduration=1.562546004 podStartE2EDuration="1.562546004s" podCreationTimestamp="2025-12-13 07:04:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:04:22.55811224 +0000 UTC m=+1124.773063074" watchObservedRunningTime="2025-12-13 07:04:22.562546004 +0000 UTC m=+1124.777496837" Dec 13 07:04:25 crc kubenswrapper[4644]: I1213 07:04:25.878774 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 13 07:04:25 crc kubenswrapper[4644]: I1213 07:04:25.880065 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 13 07:04:26 crc kubenswrapper[4644]: I1213 07:04:26.878550 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 13 07:04:27 crc kubenswrapper[4644]: I1213 07:04:27.849422 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 13 07:04:27 crc kubenswrapper[4644]: I1213 07:04:27.849770 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 13 07:04:28 crc kubenswrapper[4644]: I1213 07:04:28.865618 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0b5d2de7-5524-41a6-a24c-8af21f5a01f0" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.191:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 13 07:04:28 crc kubenswrapper[4644]: I1213 07:04:28.865582 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0b5d2de7-5524-41a6-a24c-8af21f5a01f0" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.191:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 13 07:04:30 crc kubenswrapper[4644]: I1213 07:04:30.878966 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 13 07:04:30 crc kubenswrapper[4644]: I1213 07:04:30.879419 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 13 07:04:31 crc kubenswrapper[4644]: I1213 07:04:31.878627 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 13 07:04:31 crc kubenswrapper[4644]: I1213 07:04:31.893542 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="bf31dcb5-e753-4f68-aca2-39815bdf203d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.192:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 13 07:04:31 crc kubenswrapper[4644]: I1213 07:04:31.893541 4644 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="bf31dcb5-e753-4f68-aca2-39815bdf203d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.192:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 13 07:04:31 crc kubenswrapper[4644]: I1213 07:04:31.900995 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 13 07:04:32 crc kubenswrapper[4644]: I1213 07:04:32.627117 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 13 
07:04:37 crc kubenswrapper[4644]: I1213 07:04:37.864932 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 13 07:04:37 crc kubenswrapper[4644]: I1213 07:04:37.866503 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 13 07:04:37 crc kubenswrapper[4644]: I1213 07:04:37.873784 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 13 07:04:37 crc kubenswrapper[4644]: I1213 07:04:37.876428 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 13 07:04:37 crc kubenswrapper[4644]: I1213 07:04:37.939972 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 13 07:04:38 crc kubenswrapper[4644]: I1213 07:04:38.648387 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 13 07:04:38 crc kubenswrapper[4644]: I1213 07:04:38.664985 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 13 07:04:39 crc kubenswrapper[4644]: I1213 07:04:39.753879 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:04:39 crc kubenswrapper[4644]: I1213 07:04:39.754152 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:04:39 crc kubenswrapper[4644]: I1213 07:04:39.754198 4644 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 07:04:39 crc kubenswrapper[4644]: I1213 07:04:39.754964 4644 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e41b9eff015e4f806befa3edcbb8d5a320f89b209722141f8d73fdf31b010f61"} pod="openshift-machine-config-operator/machine-config-daemon-45tj4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 07:04:39 crc kubenswrapper[4644]: I1213 07:04:39.755021 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" containerID="cri-o://e41b9eff015e4f806befa3edcbb8d5a320f89b209722141f8d73fdf31b010f61" gracePeriod=600 Dec 13 07:04:40 crc kubenswrapper[4644]: I1213 07:04:40.664807 4644 generic.go:334] "Generic (PLEG): container finished" podID="48240f19-087e-4597-b448-ab1a190a5027" containerID="e41b9eff015e4f806befa3edcbb8d5a320f89b209722141f8d73fdf31b010f61" exitCode=0 Dec 13 07:04:40 crc kubenswrapper[4644]: I1213 07:04:40.664867 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerDied","Data":"e41b9eff015e4f806befa3edcbb8d5a320f89b209722141f8d73fdf31b010f61"} Dec 13 07:04:40 crc kubenswrapper[4644]: I1213 
07:04:40.665512 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerStarted","Data":"c27c345ea5c1252e6754fce190bc5091a29be12a2dec59380a2edf28ec34cb52"} Dec 13 07:04:40 crc kubenswrapper[4644]: I1213 07:04:40.665538 4644 scope.go:117] "RemoveContainer" containerID="5e6f928015ce5189200b70ce1217403ef17ccffc81e2b7876249680646ea25ba" Dec 13 07:04:40 crc kubenswrapper[4644]: I1213 07:04:40.904797 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 13 07:04:40 crc kubenswrapper[4644]: I1213 07:04:40.909957 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 13 07:04:40 crc kubenswrapper[4644]: I1213 07:04:40.910490 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 13 07:04:41 crc kubenswrapper[4644]: I1213 07:04:41.683160 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 13 07:04:48 crc kubenswrapper[4644]: I1213 07:04:48.235318 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 07:04:49 crc kubenswrapper[4644]: I1213 07:04:49.110197 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 07:04:51 crc kubenswrapper[4644]: I1213 07:04:51.812692 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="0dd20500-a2dd-4608-a3c8-7d714ffb09c4" containerName="rabbitmq" containerID="cri-o://0c64eda3372ed53fa4829f56319a85976b6aeb35dd760f5102f3c2ed0ad0c74b" gracePeriod=604797 Dec 13 07:04:52 crc kubenswrapper[4644]: I1213 07:04:52.623176 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="706c4700-9d13-4bac-b7ac-0c19c09cc7e7" containerName="rabbitmq" containerID="cri-o://c99ebc148d695f48ca828e7137824a804d83c640f5fd71ecf18ddcbe3253fa5a" gracePeriod=604797 Dec 13 07:04:55 crc kubenswrapper[4644]: I1213 07:04:55.618187 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="0dd20500-a2dd-4608-a3c8-7d714ffb09c4" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.95:5671: connect: connection refused" Dec 13 07:04:55 crc kubenswrapper[4644]: I1213 07:04:55.914987 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="706c4700-9d13-4bac-b7ac-0c19c09cc7e7" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.96:5671: connect: connection refused" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.215394 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.294148 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-plugins\") pod \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.294240 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-tls\") pod \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.294283 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-erlang-cookie\") pod \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.294316 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.294341 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-config-data\") pod \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.294405 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-server-conf\") pod \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.294477 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-pod-info\") pod \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.294498 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-plugins-conf\") pod \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.294522 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkc2n\" (UniqueName: \"kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-kube-api-access-xkc2n\") pod \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.294596 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-confd\") pod \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\" (UID: 
\"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.294626 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-erlang-cookie-secret\") pod \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\" (UID: \"0dd20500-a2dd-4608-a3c8-7d714ffb09c4\") " Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.297181 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "0dd20500-a2dd-4608-a3c8-7d714ffb09c4" (UID: "0dd20500-a2dd-4608-a3c8-7d714ffb09c4"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.302502 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "0dd20500-a2dd-4608-a3c8-7d714ffb09c4" (UID: "0dd20500-a2dd-4608-a3c8-7d714ffb09c4"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.303029 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "0dd20500-a2dd-4608-a3c8-7d714ffb09c4" (UID: "0dd20500-a2dd-4608-a3c8-7d714ffb09c4"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.303112 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "0dd20500-a2dd-4608-a3c8-7d714ffb09c4" (UID: "0dd20500-a2dd-4608-a3c8-7d714ffb09c4"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.308924 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "0dd20500-a2dd-4608-a3c8-7d714ffb09c4" (UID: "0dd20500-a2dd-4608-a3c8-7d714ffb09c4"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.312302 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "0dd20500-a2dd-4608-a3c8-7d714ffb09c4" (UID: "0dd20500-a2dd-4608-a3c8-7d714ffb09c4"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.325459 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-kube-api-access-xkc2n" (OuterVolumeSpecName: "kube-api-access-xkc2n") pod "0dd20500-a2dd-4608-a3c8-7d714ffb09c4" (UID: "0dd20500-a2dd-4608-a3c8-7d714ffb09c4"). InnerVolumeSpecName "kube-api-access-xkc2n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.344594 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-pod-info" (OuterVolumeSpecName: "pod-info") pod "0dd20500-a2dd-4608-a3c8-7d714ffb09c4" (UID: "0dd20500-a2dd-4608-a3c8-7d714ffb09c4"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.362084 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-config-data" (OuterVolumeSpecName: "config-data") pod "0dd20500-a2dd-4608-a3c8-7d714ffb09c4" (UID: "0dd20500-a2dd-4608-a3c8-7d714ffb09c4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.398313 4644 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.398346 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.398356 4644 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-pod-info\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.398365 4644 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.398376 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkc2n\" (UniqueName: \"kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-kube-api-access-xkc2n\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.398385 4644 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.398395 4644 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.398403 4644 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.398411 4644 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.401124 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-server-conf" 
(OuterVolumeSpecName: "server-conf") pod "0dd20500-a2dd-4608-a3c8-7d714ffb09c4" (UID: "0dd20500-a2dd-4608-a3c8-7d714ffb09c4"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.432708 4644 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.477760 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "0dd20500-a2dd-4608-a3c8-7d714ffb09c4" (UID: "0dd20500-a2dd-4608-a3c8-7d714ffb09c4"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.500792 4644 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.501137 4644 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.501150 4644 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0dd20500-a2dd-4608-a3c8-7d714ffb09c4-server-conf\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.823601 4644 generic.go:334] "Generic (PLEG): container finished" podID="0dd20500-a2dd-4608-a3c8-7d714ffb09c4" containerID="0c64eda3372ed53fa4829f56319a85976b6aeb35dd760f5102f3c2ed0ad0c74b" exitCode=0 Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.823678 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0dd20500-a2dd-4608-a3c8-7d714ffb09c4","Type":"ContainerDied","Data":"0c64eda3372ed53fa4829f56319a85976b6aeb35dd760f5102f3c2ed0ad0c74b"} Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.823710 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0dd20500-a2dd-4608-a3c8-7d714ffb09c4","Type":"ContainerDied","Data":"03a2bd297f67a38c7d89c909e9cbdaba5a92f8a3dcbe471c907ac364eded75db"} Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.823729 4644 scope.go:117] "RemoveContainer" containerID="0c64eda3372ed53fa4829f56319a85976b6aeb35dd760f5102f3c2ed0ad0c74b" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.823892 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.829133 4644 generic.go:334] "Generic (PLEG): container finished" podID="706c4700-9d13-4bac-b7ac-0c19c09cc7e7" containerID="c99ebc148d695f48ca828e7137824a804d83c640f5fd71ecf18ddcbe3253fa5a" exitCode=0 Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.829207 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"706c4700-9d13-4bac-b7ac-0c19c09cc7e7","Type":"ContainerDied","Data":"c99ebc148d695f48ca828e7137824a804d83c640f5fd71ecf18ddcbe3253fa5a"} Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.868301 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.874945 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.879277 4644 scope.go:117] "RemoveContainer" containerID="7fb838eb12d8d62aadffa1f046880d9513f421e1f29a5d58016251c007b5e95d" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.904423 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 07:04:58 crc kubenswrapper[4644]: E1213 07:04:58.905287 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dd20500-a2dd-4608-a3c8-7d714ffb09c4" containerName="rabbitmq" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.905324 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dd20500-a2dd-4608-a3c8-7d714ffb09c4" containerName="rabbitmq" Dec 13 07:04:58 crc kubenswrapper[4644]: E1213 07:04:58.905365 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dd20500-a2dd-4608-a3c8-7d714ffb09c4" containerName="setup-container" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.905371 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dd20500-a2dd-4608-a3c8-7d714ffb09c4" containerName="setup-container" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.905635 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dd20500-a2dd-4608-a3c8-7d714ffb09c4" containerName="rabbitmq" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.907124 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.913050 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.913305 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.913504 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.913603 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.913654 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.913767 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.914396 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-xw827" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.935513 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.939225 4644 scope.go:117] "RemoveContainer" containerID="0c64eda3372ed53fa4829f56319a85976b6aeb35dd760f5102f3c2ed0ad0c74b" Dec 13 07:04:58 crc kubenswrapper[4644]: E1213 07:04:58.939714 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c64eda3372ed53fa4829f56319a85976b6aeb35dd760f5102f3c2ed0ad0c74b\": container with ID starting with 0c64eda3372ed53fa4829f56319a85976b6aeb35dd760f5102f3c2ed0ad0c74b not found: ID does not exist" containerID="0c64eda3372ed53fa4829f56319a85976b6aeb35dd760f5102f3c2ed0ad0c74b" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.939750 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c64eda3372ed53fa4829f56319a85976b6aeb35dd760f5102f3c2ed0ad0c74b"} err="failed to get container status \"0c64eda3372ed53fa4829f56319a85976b6aeb35dd760f5102f3c2ed0ad0c74b\": rpc error: code = NotFound desc = could not find container \"0c64eda3372ed53fa4829f56319a85976b6aeb35dd760f5102f3c2ed0ad0c74b\": container with ID starting with 0c64eda3372ed53fa4829f56319a85976b6aeb35dd760f5102f3c2ed0ad0c74b not found: ID does not exist" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.939777 4644 scope.go:117] "RemoveContainer" containerID="7fb838eb12d8d62aadffa1f046880d9513f421e1f29a5d58016251c007b5e95d" Dec 13 07:04:58 crc kubenswrapper[4644]: E1213 07:04:58.940230 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fb838eb12d8d62aadffa1f046880d9513f421e1f29a5d58016251c007b5e95d\": container with ID starting with 7fb838eb12d8d62aadffa1f046880d9513f421e1f29a5d58016251c007b5e95d not found: ID does not exist" containerID="7fb838eb12d8d62aadffa1f046880d9513f421e1f29a5d58016251c007b5e95d" Dec 13 07:04:58 crc kubenswrapper[4644]: I1213 07:04:58.940247 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fb838eb12d8d62aadffa1f046880d9513f421e1f29a5d58016251c007b5e95d"} err="failed to get container status 
\"7fb838eb12d8d62aadffa1f046880d9513f421e1f29a5d58016251c007b5e95d\": rpc error: code = NotFound desc = could not find container \"7fb838eb12d8d62aadffa1f046880d9513f421e1f29a5d58016251c007b5e95d\": container with ID starting with 7fb838eb12d8d62aadffa1f046880d9513f421e1f29a5d58016251c007b5e95d not found: ID does not exist" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.014806 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/78c20695-df2a-4d1d-a8e8-4dc7817b5803-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.014891 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/78c20695-df2a-4d1d-a8e8-4dc7817b5803-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.014936 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/78c20695-df2a-4d1d-a8e8-4dc7817b5803-server-conf\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.014979 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/78c20695-df2a-4d1d-a8e8-4dc7817b5803-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.015112 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/78c20695-df2a-4d1d-a8e8-4dc7817b5803-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.015255 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/78c20695-df2a-4d1d-a8e8-4dc7817b5803-pod-info\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.015475 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/78c20695-df2a-4d1d-a8e8-4dc7817b5803-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.015563 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.015642 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-669x5\" (UniqueName: \"kubernetes.io/projected/78c20695-df2a-4d1d-a8e8-4dc7817b5803-kube-api-access-669x5\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.015723 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/78c20695-df2a-4d1d-a8e8-4dc7817b5803-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.015856 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/78c20695-df2a-4d1d-a8e8-4dc7817b5803-config-data\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.056048 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.117260 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-669x5\" (UniqueName: \"kubernetes.io/projected/78c20695-df2a-4d1d-a8e8-4dc7817b5803-kube-api-access-669x5\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.117335 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/78c20695-df2a-4d1d-a8e8-4dc7817b5803-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.117397 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/78c20695-df2a-4d1d-a8e8-4dc7817b5803-config-data\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.117515 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/78c20695-df2a-4d1d-a8e8-4dc7817b5803-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.117546 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/78c20695-df2a-4d1d-a8e8-4dc7817b5803-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.117573 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/78c20695-df2a-4d1d-a8e8-4dc7817b5803-server-conf\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.117612 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/78c20695-df2a-4d1d-a8e8-4dc7817b5803-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.117639 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/78c20695-df2a-4d1d-a8e8-4dc7817b5803-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.117682 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/78c20695-df2a-4d1d-a8e8-4dc7817b5803-pod-info\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.117742 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/78c20695-df2a-4d1d-a8e8-4dc7817b5803-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.117770 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.117985 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/78c20695-df2a-4d1d-a8e8-4dc7817b5803-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.118038 4644 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.118462 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/78c20695-df2a-4d1d-a8e8-4dc7817b5803-config-data\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.118495 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/78c20695-df2a-4d1d-a8e8-4dc7817b5803-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.118752 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/78c20695-df2a-4d1d-a8e8-4dc7817b5803-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 
07:04:59.118997 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/78c20695-df2a-4d1d-a8e8-4dc7817b5803-server-conf\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0"
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.125113 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/78c20695-df2a-4d1d-a8e8-4dc7817b5803-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0"
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.127134 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/78c20695-df2a-4d1d-a8e8-4dc7817b5803-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0"
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.127741 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/78c20695-df2a-4d1d-a8e8-4dc7817b5803-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0"
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.143227 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-669x5\" (UniqueName: \"kubernetes.io/projected/78c20695-df2a-4d1d-a8e8-4dc7817b5803-kube-api-access-669x5\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0"
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.150189 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/78c20695-df2a-4d1d-a8e8-4dc7817b5803-pod-info\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0"
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.176778 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"78c20695-df2a-4d1d-a8e8-4dc7817b5803\") " pod="openstack/rabbitmq-server-0"
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.218979 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-plugins\") pod \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") "
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.219037 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-plugins-conf\") pod \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") "
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.219064 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-erlang-cookie-secret\") pod \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") "
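At this point every volume of the replacement rabbitmq-server-0 (UID 78c20695-…) is mounted, and the reconciler switches to unmounting what the old pod UID 706c4700-… still holds. Reduced to its core, the loop is a set difference between the desired world and the actual world; a self-contained sketch (volume names borrowed from the records, the function itself is illustrative):

package main

import "fmt"

// reconcile returns what must be mounted (desired but absent) and what must
// be unmounted (present but no longer desired), the two directions visible
// above as MountVolume.SetUp and operationExecutor.UnmountVolume.
func reconcile(desired, actual map[string]bool) (toMount, toUnmount []string) {
	for v := range desired {
		if !actual[v] {
			toMount = append(toMount, v)
		}
	}
	for v := range actual {
		if !desired[v] {
			toUnmount = append(toUnmount, v)
		}
	}
	return toMount, toUnmount
}

func main() {
	desired := map[string]bool{"plugins-conf": true, "server-conf": true, "rabbitmq-tls": true}
	actual := map[string]bool{"server-conf": true, "persistence": true}
	m, u := reconcile(desired, actual)
	fmt.Println("mount:", m, "unmount:", u)
}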
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.219083 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") "
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.219140 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74lzh\" (UniqueName: \"kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-kube-api-access-74lzh\") pod \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") "
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.219165 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-server-conf\") pod \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") "
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.219187 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-pod-info\") pod \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") "
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.219239 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-erlang-cookie\") pod \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") "
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.219274 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-tls\") pod \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") "
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.219309 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-config-data\") pod \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") "
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.219326 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-confd\") pod \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\" (UID: \"706c4700-9d13-4bac-b7ac-0c19c09cc7e7\") "
Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.220601 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "706c4700-9d13-4bac-b7ac-0c19c09cc7e7" (UID: "706c4700-9d13-4bac-b7ac-0c19c09cc7e7"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.220606 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "706c4700-9d13-4bac-b7ac-0c19c09cc7e7" (UID: "706c4700-9d13-4bac-b7ac-0c19c09cc7e7"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.220966 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "706c4700-9d13-4bac-b7ac-0c19c09cc7e7" (UID: "706c4700-9d13-4bac-b7ac-0c19c09cc7e7"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.223778 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-kube-api-access-74lzh" (OuterVolumeSpecName: "kube-api-access-74lzh") pod "706c4700-9d13-4bac-b7ac-0c19c09cc7e7" (UID: "706c4700-9d13-4bac-b7ac-0c19c09cc7e7"). InnerVolumeSpecName "kube-api-access-74lzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.224056 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "706c4700-9d13-4bac-b7ac-0c19c09cc7e7" (UID: "706c4700-9d13-4bac-b7ac-0c19c09cc7e7"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.224363 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "persistence") pod "706c4700-9d13-4bac-b7ac-0c19c09cc7e7" (UID: "706c4700-9d13-4bac-b7ac-0c19c09cc7e7"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.224513 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-pod-info" (OuterVolumeSpecName: "pod-info") pod "706c4700-9d13-4bac-b7ac-0c19c09cc7e7" (UID: "706c4700-9d13-4bac-b7ac-0c19c09cc7e7"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.224857 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "706c4700-9d13-4bac-b7ac-0c19c09cc7e7" (UID: "706c4700-9d13-4bac-b7ac-0c19c09cc7e7"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.242804 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.257998 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-config-data" (OuterVolumeSpecName: "config-data") pod "706c4700-9d13-4bac-b7ac-0c19c09cc7e7" (UID: "706c4700-9d13-4bac-b7ac-0c19c09cc7e7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.267673 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-server-conf" (OuterVolumeSpecName: "server-conf") pod "706c4700-9d13-4bac-b7ac-0c19c09cc7e7" (UID: "706c4700-9d13-4bac-b7ac-0c19c09cc7e7"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.301711 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "706c4700-9d13-4bac-b7ac-0c19c09cc7e7" (UID: "706c4700-9d13-4bac-b7ac-0c19c09cc7e7"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.321699 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74lzh\" (UniqueName: \"kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-kube-api-access-74lzh\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.321731 4644 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-server-conf\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.321743 4644 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-pod-info\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.321756 4644 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.321765 4644 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.321774 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.321783 4644 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.321791 4644 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:59 crc 
kubenswrapper[4644]: I1213 07:04:59.321799 4644 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.321807 4644 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/706c4700-9d13-4bac-b7ac-0c19c09cc7e7-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.321853 4644 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.340487 4644 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.423872 4644 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.678321 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.839603 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.839618 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"706c4700-9d13-4bac-b7ac-0c19c09cc7e7","Type":"ContainerDied","Data":"b9120b4bf5edb948f9978d94d2a77c11f5e00d30e31116718e029c5702f6c1e7"} Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.839995 4644 scope.go:117] "RemoveContainer" containerID="c99ebc148d695f48ca828e7137824a804d83c640f5fd71ecf18ddcbe3253fa5a" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.849247 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"78c20695-df2a-4d1d-a8e8-4dc7817b5803","Type":"ContainerStarted","Data":"85044c05b4645b9a54a8daeea67e181fda282acebcd30bce74e0b5cbd6fed62f"} Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.868054 4644 scope.go:117] "RemoveContainer" containerID="70a5774436468fb133077a60d251233d249cc290e92dd10a84f76c87a4876089" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.872085 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.879902 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.893194 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 07:04:59 crc kubenswrapper[4644]: E1213 07:04:59.893620 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="706c4700-9d13-4bac-b7ac-0c19c09cc7e7" containerName="setup-container" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.893641 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="706c4700-9d13-4bac-b7ac-0c19c09cc7e7" containerName="setup-container" Dec 13 07:04:59 crc kubenswrapper[4644]: E1213 07:04:59.893676 4644 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="706c4700-9d13-4bac-b7ac-0c19c09cc7e7" containerName="rabbitmq" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.893682 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="706c4700-9d13-4bac-b7ac-0c19c09cc7e7" containerName="rabbitmq" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.893894 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="706c4700-9d13-4bac-b7ac-0c19c09cc7e7" containerName="rabbitmq" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.894857 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.900036 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.900898 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.901472 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.901651 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.901879 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.901971 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.901700 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-mxxlr" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.902109 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.945649 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/43c1f8b3-1d91-4152-bf2d-be501022615a-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.945995 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/43c1f8b3-1d91-4152-bf2d-be501022615a-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.946165 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/43c1f8b3-1d91-4152-bf2d-be501022615a-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.946351 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5n75\" (UniqueName: \"kubernetes.io/projected/43c1f8b3-1d91-4152-bf2d-be501022615a-kube-api-access-k5n75\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.946457 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/43c1f8b3-1d91-4152-bf2d-be501022615a-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.946643 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/43c1f8b3-1d91-4152-bf2d-be501022615a-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.946733 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/43c1f8b3-1d91-4152-bf2d-be501022615a-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.946852 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/43c1f8b3-1d91-4152-bf2d-be501022615a-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.946936 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.947057 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/43c1f8b3-1d91-4152-bf2d-be501022615a-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:04:59 crc kubenswrapper[4644]: I1213 07:04:59.947175 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/43c1f8b3-1d91-4152-bf2d-be501022615a-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.048999 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/43c1f8b3-1d91-4152-bf2d-be501022615a-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.049415 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5n75\" (UniqueName: \"kubernetes.io/projected/43c1f8b3-1d91-4152-bf2d-be501022615a-kube-api-access-k5n75\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.049562 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/43c1f8b3-1d91-4152-bf2d-be501022615a-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.049771 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/43c1f8b3-1d91-4152-bf2d-be501022615a-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.049881 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/43c1f8b3-1d91-4152-bf2d-be501022615a-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.050018 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/43c1f8b3-1d91-4152-bf2d-be501022615a-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.050113 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.050247 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/43c1f8b3-1d91-4152-bf2d-be501022615a-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.050376 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/43c1f8b3-1d91-4152-bf2d-be501022615a-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.050485 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/43c1f8b3-1d91-4152-bf2d-be501022615a-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.050591 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/43c1f8b3-1d91-4152-bf2d-be501022615a-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.050744 4644 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/43c1f8b3-1d91-4152-bf2d-be501022615a-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.050413 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/43c1f8b3-1d91-4152-bf2d-be501022615a-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.050518 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/43c1f8b3-1d91-4152-bf2d-be501022615a-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.050425 4644 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.051178 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/43c1f8b3-1d91-4152-bf2d-be501022615a-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.051304 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/43c1f8b3-1d91-4152-bf2d-be501022615a-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.055640 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/43c1f8b3-1d91-4152-bf2d-be501022615a-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.055662 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/43c1f8b3-1d91-4152-bf2d-be501022615a-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.056619 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/43c1f8b3-1d91-4152-bf2d-be501022615a-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.056690 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/43c1f8b3-1d91-4152-bf2d-be501022615a-rabbitmq-confd\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.064653 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5n75\" (UniqueName: \"kubernetes.io/projected/43c1f8b3-1d91-4152-bf2d-be501022615a-kube-api-access-k5n75\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.072393 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"43c1f8b3-1d91-4152-bf2d-be501022615a\") " pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.216344 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.401558 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0dd20500-a2dd-4608-a3c8-7d714ffb09c4" path="/var/lib/kubelet/pods/0dd20500-a2dd-4608-a3c8-7d714ffb09c4/volumes" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.402609 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="706c4700-9d13-4bac-b7ac-0c19c09cc7e7" path="/var/lib/kubelet/pods/706c4700-9d13-4bac-b7ac-0c19c09cc7e7/volumes" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.709969 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 13 07:05:00 crc kubenswrapper[4644]: W1213 07:05:00.740960 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod43c1f8b3_1d91_4152_bf2d_be501022615a.slice/crio-0c96940bd7b0f73e9aaa46f805057b6071d958dc3295f64dd7fc9e8f75768491 WatchSource:0}: Error finding container 0c96940bd7b0f73e9aaa46f805057b6071d958dc3295f64dd7fc9e8f75768491: Status 404 returned error can't find the container with id 0c96940bd7b0f73e9aaa46f805057b6071d958dc3295f64dd7fc9e8f75768491 Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.821665 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7bf6f4788c-q2v2s"] Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.829264 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.836181 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.837393 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bf6f4788c-q2v2s"] Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.868168 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"43c1f8b3-1d91-4152-bf2d-be501022615a","Type":"ContainerStarted","Data":"0c96940bd7b0f73e9aaa46f805057b6071d958dc3295f64dd7fc9e8f75768491"} Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.873009 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-openstack-edpm-ipam\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.873164 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-config\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.873187 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pgfp\" (UniqueName: \"kubernetes.io/projected/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-kube-api-access-6pgfp\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.873249 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-ovsdbserver-sb\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.873290 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-ovsdbserver-nb\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.873400 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-dns-svc\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.975033 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-dns-svc\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc 
kubenswrapper[4644]: I1213 07:05:00.975137 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-openstack-edpm-ipam\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.975283 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-config\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.975306 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pgfp\" (UniqueName: \"kubernetes.io/projected/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-kube-api-access-6pgfp\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.975356 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-ovsdbserver-sb\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.975392 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-ovsdbserver-nb\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.976037 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-config\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.976078 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-dns-svc\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.976710 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-openstack-edpm-ipam\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.976787 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-ovsdbserver-sb\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.976943 4644 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-ovsdbserver-nb\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:00 crc kubenswrapper[4644]: I1213 07:05:00.991013 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pgfp\" (UniqueName: \"kubernetes.io/projected/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-kube-api-access-6pgfp\") pod \"dnsmasq-dns-7bf6f4788c-q2v2s\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:01 crc kubenswrapper[4644]: I1213 07:05:01.156724 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:01 crc kubenswrapper[4644]: I1213 07:05:01.569205 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bf6f4788c-q2v2s"] Dec 13 07:05:01 crc kubenswrapper[4644]: I1213 07:05:01.884223 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"78c20695-df2a-4d1d-a8e8-4dc7817b5803","Type":"ContainerStarted","Data":"61481c7898055288e74bf1525bdd29ff5b8c272f887a488395b1e80cbd0b9021"} Dec 13 07:05:01 crc kubenswrapper[4644]: I1213 07:05:01.886952 4644 generic.go:334] "Generic (PLEG): container finished" podID="6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e" containerID="ec9114278f69d2ef1a99985664f200d6c84e833a95380767ecb7f72afc73d1f5" exitCode=0 Dec 13 07:05:01 crc kubenswrapper[4644]: I1213 07:05:01.887037 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" event={"ID":"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e","Type":"ContainerDied","Data":"ec9114278f69d2ef1a99985664f200d6c84e833a95380767ecb7f72afc73d1f5"} Dec 13 07:05:01 crc kubenswrapper[4644]: I1213 07:05:01.887060 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" event={"ID":"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e","Type":"ContainerStarted","Data":"8fdec81a1c7855b5b2aa7a33dd9e9d675f901d68f3a3e52a6e7eeb4e7b1e92bf"} Dec 13 07:05:01 crc kubenswrapper[4644]: I1213 07:05:01.888704 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"43c1f8b3-1d91-4152-bf2d-be501022615a","Type":"ContainerStarted","Data":"004a0174a0a17ae58272cfce0f430771736af957b1bd360de81f2895556c453c"} Dec 13 07:05:02 crc kubenswrapper[4644]: I1213 07:05:02.897618 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" event={"ID":"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e","Type":"ContainerStarted","Data":"03fbbbf25ca23e7f5de161976772e585392f9387eb1e50e8bd1d014f1156f9e7"} Dec 13 07:05:02 crc kubenswrapper[4644]: I1213 07:05:02.898746 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:02 crc kubenswrapper[4644]: I1213 07:05:02.918105 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" podStartSLOduration=2.918077964 podStartE2EDuration="2.918077964s" podCreationTimestamp="2025-12-13 07:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:05:02.912897807 +0000 UTC m=+1165.127848640" watchObservedRunningTime="2025-12-13 07:05:02.918077964 +0000 UTC 
m=+1165.133028796" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.158594 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.200025 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f69c5c76f-qtbp6"] Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.200245 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6" podUID="40dff749-3ac6-4c02-b892-e0d70c4e267f" containerName="dnsmasq-dns" containerID="cri-o://33e65c6df8f764c759d4ba20cc968eba42e6df4c6613abcbfc424ae07f38904e" gracePeriod=10 Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.306647 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5dfc84b95f-9lh9x"] Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.323576 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.339981 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5dfc84b95f-9lh9x"] Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.389004 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-dns-svc\") pod \"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.389646 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-ovsdbserver-nb\") pod \"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.389785 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-openstack-edpm-ipam\") pod \"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.390483 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwlf2\" (UniqueName: \"kubernetes.io/projected/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-kube-api-access-lwlf2\") pod \"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.390551 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-ovsdbserver-sb\") pod \"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.390574 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-config\") pod 
\"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.492600 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwlf2\" (UniqueName: \"kubernetes.io/projected/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-kube-api-access-lwlf2\") pod \"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.492653 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-config\") pod \"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.492676 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-ovsdbserver-sb\") pod \"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.492806 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-dns-svc\") pod \"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.492878 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-ovsdbserver-nb\") pod \"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.492931 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-openstack-edpm-ipam\") pod \"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.494260 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-openstack-edpm-ipam\") pod \"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.494365 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-config\") pod \"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.494656 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-dns-svc\") pod \"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " 
pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.494984 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-ovsdbserver-nb\") pod \"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.496188 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-ovsdbserver-sb\") pod \"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.513315 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwlf2\" (UniqueName: \"kubernetes.io/projected/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-kube-api-access-lwlf2\") pod \"dnsmasq-dns-5dfc84b95f-9lh9x\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.585145 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.673763 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.696109 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-ovsdbserver-nb\") pod \"40dff749-3ac6-4c02-b892-e0d70c4e267f\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.696157 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-dns-svc\") pod \"40dff749-3ac6-4c02-b892-e0d70c4e267f\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.696240 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-ovsdbserver-sb\") pod \"40dff749-3ac6-4c02-b892-e0d70c4e267f\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.696275 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gc8rx\" (UniqueName: \"kubernetes.io/projected/40dff749-3ac6-4c02-b892-e0d70c4e267f-kube-api-access-gc8rx\") pod \"40dff749-3ac6-4c02-b892-e0d70c4e267f\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.696407 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-config\") pod \"40dff749-3ac6-4c02-b892-e0d70c4e267f\" (UID: \"40dff749-3ac6-4c02-b892-e0d70c4e267f\") " Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.710596 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40dff749-3ac6-4c02-b892-e0d70c4e267f-kube-api-access-gc8rx" 
(OuterVolumeSpecName: "kube-api-access-gc8rx") pod "40dff749-3ac6-4c02-b892-e0d70c4e267f" (UID: "40dff749-3ac6-4c02-b892-e0d70c4e267f"). InnerVolumeSpecName "kube-api-access-gc8rx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.729667 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "40dff749-3ac6-4c02-b892-e0d70c4e267f" (UID: "40dff749-3ac6-4c02-b892-e0d70c4e267f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.732087 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-config" (OuterVolumeSpecName: "config") pod "40dff749-3ac6-4c02-b892-e0d70c4e267f" (UID: "40dff749-3ac6-4c02-b892-e0d70c4e267f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.733343 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "40dff749-3ac6-4c02-b892-e0d70c4e267f" (UID: "40dff749-3ac6-4c02-b892-e0d70c4e267f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.734138 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "40dff749-3ac6-4c02-b892-e0d70c4e267f" (UID: "40dff749-3ac6-4c02-b892-e0d70c4e267f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.799252 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.799284 4644 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.799294 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.799303 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gc8rx\" (UniqueName: \"kubernetes.io/projected/40dff749-3ac6-4c02-b892-e0d70c4e267f-kube-api-access-gc8rx\") on node \"crc\" DevicePath \"\"" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.799312 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40dff749-3ac6-4c02-b892-e0d70c4e267f-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.965614 4644 generic.go:334] "Generic (PLEG): container finished" podID="40dff749-3ac6-4c02-b892-e0d70c4e267f" containerID="33e65c6df8f764c759d4ba20cc968eba42e6df4c6613abcbfc424ae07f38904e" exitCode=0 Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.965656 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6" event={"ID":"40dff749-3ac6-4c02-b892-e0d70c4e267f","Type":"ContainerDied","Data":"33e65c6df8f764c759d4ba20cc968eba42e6df4c6613abcbfc424ae07f38904e"} Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.965675 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.965693 4644 scope.go:117] "RemoveContainer" containerID="33e65c6df8f764c759d4ba20cc968eba42e6df4c6613abcbfc424ae07f38904e" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.965680 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f69c5c76f-qtbp6" event={"ID":"40dff749-3ac6-4c02-b892-e0d70c4e267f","Type":"ContainerDied","Data":"be691b2ff7b7517ffbf0dcd37deebbeea3f91994577126e10e8b1521acff34e0"} Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.987592 4644 scope.go:117] "RemoveContainer" containerID="e8a10d061eda4493e42c0d950f165c5bcacfe182a9ab17865b739ae4fd491a34" Dec 13 07:05:11 crc kubenswrapper[4644]: I1213 07:05:11.995144 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f69c5c76f-qtbp6"] Dec 13 07:05:12 crc kubenswrapper[4644]: I1213 07:05:12.002390 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6f69c5c76f-qtbp6"] Dec 13 07:05:12 crc kubenswrapper[4644]: I1213 07:05:12.023125 4644 scope.go:117] "RemoveContainer" containerID="33e65c6df8f764c759d4ba20cc968eba42e6df4c6613abcbfc424ae07f38904e" Dec 13 07:05:12 crc kubenswrapper[4644]: E1213 07:05:12.023588 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33e65c6df8f764c759d4ba20cc968eba42e6df4c6613abcbfc424ae07f38904e\": container with ID starting with 33e65c6df8f764c759d4ba20cc968eba42e6df4c6613abcbfc424ae07f38904e not found: ID does not exist" containerID="33e65c6df8f764c759d4ba20cc968eba42e6df4c6613abcbfc424ae07f38904e" Dec 13 07:05:12 crc kubenswrapper[4644]: I1213 07:05:12.023622 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33e65c6df8f764c759d4ba20cc968eba42e6df4c6613abcbfc424ae07f38904e"} err="failed to get container status \"33e65c6df8f764c759d4ba20cc968eba42e6df4c6613abcbfc424ae07f38904e\": rpc error: code = NotFound desc = could not find container \"33e65c6df8f764c759d4ba20cc968eba42e6df4c6613abcbfc424ae07f38904e\": container with ID starting with 33e65c6df8f764c759d4ba20cc968eba42e6df4c6613abcbfc424ae07f38904e not found: ID does not exist" Dec 13 07:05:12 crc kubenswrapper[4644]: I1213 07:05:12.023643 4644 scope.go:117] "RemoveContainer" containerID="e8a10d061eda4493e42c0d950f165c5bcacfe182a9ab17865b739ae4fd491a34" Dec 13 07:05:12 crc kubenswrapper[4644]: E1213 07:05:12.023969 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8a10d061eda4493e42c0d950f165c5bcacfe182a9ab17865b739ae4fd491a34\": container with ID starting with e8a10d061eda4493e42c0d950f165c5bcacfe182a9ab17865b739ae4fd491a34 not found: ID does not exist" containerID="e8a10d061eda4493e42c0d950f165c5bcacfe182a9ab17865b739ae4fd491a34" Dec 13 07:05:12 crc kubenswrapper[4644]: I1213 07:05:12.023991 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8a10d061eda4493e42c0d950f165c5bcacfe182a9ab17865b739ae4fd491a34"} err="failed to get container status \"e8a10d061eda4493e42c0d950f165c5bcacfe182a9ab17865b739ae4fd491a34\": rpc error: code = NotFound desc = could not find container \"e8a10d061eda4493e42c0d950f165c5bcacfe182a9ab17865b739ae4fd491a34\": container with ID starting with e8a10d061eda4493e42c0d950f165c5bcacfe182a9ab17865b739ae4fd491a34 not found: ID does not exist" Dec 13 
07:05:12 crc kubenswrapper[4644]: I1213 07:05:12.072871 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5dfc84b95f-9lh9x"] Dec 13 07:05:12 crc kubenswrapper[4644]: I1213 07:05:12.400934 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40dff749-3ac6-4c02-b892-e0d70c4e267f" path="/var/lib/kubelet/pods/40dff749-3ac6-4c02-b892-e0d70c4e267f/volumes" Dec 13 07:05:12 crc kubenswrapper[4644]: I1213 07:05:12.975902 4644 generic.go:334] "Generic (PLEG): container finished" podID="c6b6b66f-5b04-4c2e-8c8c-bb8c21198597" containerID="595e3290c9d6ff654cbdf4ed4cf50210226f490aa0b270da249150d33c5063a6" exitCode=0 Dec 13 07:05:12 crc kubenswrapper[4644]: I1213 07:05:12.975976 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" event={"ID":"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597","Type":"ContainerDied","Data":"595e3290c9d6ff654cbdf4ed4cf50210226f490aa0b270da249150d33c5063a6"} Dec 13 07:05:12 crc kubenswrapper[4644]: I1213 07:05:12.976010 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" event={"ID":"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597","Type":"ContainerStarted","Data":"d1fdebdb9213a5cf6153a9ad35edce34a828f7c1bb2bd03068e2ecf0e91979af"} Dec 13 07:05:13 crc kubenswrapper[4644]: I1213 07:05:13.988559 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" event={"ID":"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597","Type":"ContainerStarted","Data":"8acb6f988f074775bfa1ec565084c6b72d32422160eb795fd2ab12f4dcda841d"} Dec 13 07:05:13 crc kubenswrapper[4644]: I1213 07:05:13.989622 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:14 crc kubenswrapper[4644]: I1213 07:05:14.007223 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" podStartSLOduration=3.007201051 podStartE2EDuration="3.007201051s" podCreationTimestamp="2025-12-13 07:05:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:05:14.002650658 +0000 UTC m=+1176.217601492" watchObservedRunningTime="2025-12-13 07:05:14.007201051 +0000 UTC m=+1176.222151884" Dec 13 07:05:21 crc kubenswrapper[4644]: I1213 07:05:21.675629 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:05:21 crc kubenswrapper[4644]: I1213 07:05:21.723004 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bf6f4788c-q2v2s"] Dec 13 07:05:21 crc kubenswrapper[4644]: I1213 07:05:21.723309 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" podUID="6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e" containerName="dnsmasq-dns" containerID="cri-o://03fbbbf25ca23e7f5de161976772e585392f9387eb1e50e8bd1d014f1156f9e7" gracePeriod=10 Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.057936 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" event={"ID":"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e","Type":"ContainerDied","Data":"03fbbbf25ca23e7f5de161976772e585392f9387eb1e50e8bd1d014f1156f9e7"} Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.057979 4644 generic.go:334] "Generic (PLEG): container finished" podID="6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e" 
containerID="03fbbbf25ca23e7f5de161976772e585392f9387eb1e50e8bd1d014f1156f9e7" exitCode=0 Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.130178 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.202914 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-ovsdbserver-sb\") pod \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.203071 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pgfp\" (UniqueName: \"kubernetes.io/projected/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-kube-api-access-6pgfp\") pod \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.203195 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-dns-svc\") pod \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.203285 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-openstack-edpm-ipam\") pod \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.203377 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-ovsdbserver-nb\") pod \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.203416 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-config\") pod \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\" (UID: \"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e\") " Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.208505 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-kube-api-access-6pgfp" (OuterVolumeSpecName: "kube-api-access-6pgfp") pod "6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e" (UID: "6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e"). InnerVolumeSpecName "kube-api-access-6pgfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.242579 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e" (UID: "6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.246399 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-config" (OuterVolumeSpecName: "config") pod "6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e" (UID: "6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.247347 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e" (UID: "6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.251478 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e" (UID: "6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.255393 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e" (UID: "6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.306199 4644 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.306237 4644 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.306251 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.306262 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.306271 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 07:05:22 crc kubenswrapper[4644]: I1213 07:05:22.306282 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pgfp\" (UniqueName: \"kubernetes.io/projected/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e-kube-api-access-6pgfp\") on node \"crc\" DevicePath \"\"" Dec 13 07:05:23 crc kubenswrapper[4644]: I1213 07:05:23.067203 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" event={"ID":"6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e","Type":"ContainerDied","Data":"8fdec81a1c7855b5b2aa7a33dd9e9d675f901d68f3a3e52a6e7eeb4e7b1e92bf"} Dec 13 07:05:23 crc kubenswrapper[4644]: I1213 07:05:23.068070 4644 scope.go:117] "RemoveContainer" containerID="03fbbbf25ca23e7f5de161976772e585392f9387eb1e50e8bd1d014f1156f9e7" Dec 13 07:05:23 crc kubenswrapper[4644]: I1213 07:05:23.067280 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bf6f4788c-q2v2s" Dec 13 07:05:23 crc kubenswrapper[4644]: I1213 07:05:23.087873 4644 scope.go:117] "RemoveContainer" containerID="ec9114278f69d2ef1a99985664f200d6c84e833a95380767ecb7f72afc73d1f5" Dec 13 07:05:23 crc kubenswrapper[4644]: I1213 07:05:23.087945 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bf6f4788c-q2v2s"] Dec 13 07:05:23 crc kubenswrapper[4644]: I1213 07:05:23.113008 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7bf6f4788c-q2v2s"] Dec 13 07:05:24 crc kubenswrapper[4644]: I1213 07:05:24.398099 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e" path="/var/lib/kubelet/pods/6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e/volumes" Dec 13 07:05:31 crc kubenswrapper[4644]: I1213 07:05:31.893416 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw"] Dec 13 07:05:31 crc kubenswrapper[4644]: E1213 07:05:31.894236 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e" containerName="init" Dec 13 07:05:31 crc kubenswrapper[4644]: I1213 07:05:31.894251 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e" containerName="init" Dec 13 07:05:31 crc kubenswrapper[4644]: E1213 07:05:31.894264 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40dff749-3ac6-4c02-b892-e0d70c4e267f" containerName="dnsmasq-dns" Dec 13 07:05:31 crc kubenswrapper[4644]: I1213 07:05:31.894270 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="40dff749-3ac6-4c02-b892-e0d70c4e267f" containerName="dnsmasq-dns" Dec 13 07:05:31 crc kubenswrapper[4644]: E1213 07:05:31.894279 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e" containerName="dnsmasq-dns" Dec 13 07:05:31 crc kubenswrapper[4644]: I1213 07:05:31.894286 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e" containerName="dnsmasq-dns" Dec 13 07:05:31 crc kubenswrapper[4644]: E1213 07:05:31.894312 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40dff749-3ac6-4c02-b892-e0d70c4e267f" containerName="init" Dec 13 07:05:31 crc kubenswrapper[4644]: I1213 07:05:31.894317 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="40dff749-3ac6-4c02-b892-e0d70c4e267f" containerName="init" Dec 13 07:05:31 crc kubenswrapper[4644]: I1213 07:05:31.894528 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ba295a1-b0fe-49be-bfc9-a7e5a7dcd55e" containerName="dnsmasq-dns" Dec 13 07:05:31 crc kubenswrapper[4644]: I1213 07:05:31.894544 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="40dff749-3ac6-4c02-b892-e0d70c4e267f" containerName="dnsmasq-dns" Dec 13 07:05:31 crc kubenswrapper[4644]: I1213 07:05:31.895130 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" Dec 13 07:05:31 crc kubenswrapper[4644]: I1213 07:05:31.898530 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:05:31 crc kubenswrapper[4644]: I1213 07:05:31.898638 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:05:31 crc kubenswrapper[4644]: I1213 07:05:31.898796 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:05:31 crc kubenswrapper[4644]: I1213 07:05:31.904026 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:05:31 crc kubenswrapper[4644]: I1213 07:05:31.911729 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw"] Dec 13 07:05:31 crc kubenswrapper[4644]: I1213 07:05:31.991255 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk6gc\" (UniqueName: \"kubernetes.io/projected/02272783-8f74-4b52-9518-cb97dcf8205b-kube-api-access-gk6gc\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw\" (UID: \"02272783-8f74-4b52-9518-cb97dcf8205b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" Dec 13 07:05:31 crc kubenswrapper[4644]: I1213 07:05:31.991736 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw\" (UID: \"02272783-8f74-4b52-9518-cb97dcf8205b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" Dec 13 07:05:31 crc kubenswrapper[4644]: I1213 07:05:31.992163 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw\" (UID: \"02272783-8f74-4b52-9518-cb97dcf8205b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" Dec 13 07:05:31 crc kubenswrapper[4644]: I1213 07:05:31.992308 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw\" (UID: \"02272783-8f74-4b52-9518-cb97dcf8205b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" Dec 13 07:05:32 crc kubenswrapper[4644]: I1213 07:05:32.094529 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw\" (UID: \"02272783-8f74-4b52-9518-cb97dcf8205b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" Dec 13 07:05:32 crc kubenswrapper[4644]: I1213 07:05:32.094653 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-repo-setup-combined-ca-bundle\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw\" (UID: \"02272783-8f74-4b52-9518-cb97dcf8205b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" Dec 13 07:05:32 crc kubenswrapper[4644]: I1213 07:05:32.094729 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw\" (UID: \"02272783-8f74-4b52-9518-cb97dcf8205b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" Dec 13 07:05:32 crc kubenswrapper[4644]: I1213 07:05:32.094775 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk6gc\" (UniqueName: \"kubernetes.io/projected/02272783-8f74-4b52-9518-cb97dcf8205b-kube-api-access-gk6gc\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw\" (UID: \"02272783-8f74-4b52-9518-cb97dcf8205b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" Dec 13 07:05:32 crc kubenswrapper[4644]: I1213 07:05:32.100559 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw\" (UID: \"02272783-8f74-4b52-9518-cb97dcf8205b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" Dec 13 07:05:32 crc kubenswrapper[4644]: I1213 07:05:32.100569 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw\" (UID: \"02272783-8f74-4b52-9518-cb97dcf8205b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" Dec 13 07:05:32 crc kubenswrapper[4644]: I1213 07:05:32.101199 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw\" (UID: \"02272783-8f74-4b52-9518-cb97dcf8205b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" Dec 13 07:05:32 crc kubenswrapper[4644]: I1213 07:05:32.109787 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk6gc\" (UniqueName: \"kubernetes.io/projected/02272783-8f74-4b52-9518-cb97dcf8205b-kube-api-access-gk6gc\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw\" (UID: \"02272783-8f74-4b52-9518-cb97dcf8205b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" Dec 13 07:05:32 crc kubenswrapper[4644]: I1213 07:05:32.216045 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" Dec 13 07:05:32 crc kubenswrapper[4644]: I1213 07:05:32.677300 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw"] Dec 13 07:05:32 crc kubenswrapper[4644]: W1213 07:05:32.701671 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02272783_8f74_4b52_9518_cb97dcf8205b.slice/crio-996c5a1156d255ec8d2d4c81f64b0204b52a1b91187fdce77590de791dfd86d1 WatchSource:0}: Error finding container 996c5a1156d255ec8d2d4c81f64b0204b52a1b91187fdce77590de791dfd86d1: Status 404 returned error can't find the container with id 996c5a1156d255ec8d2d4c81f64b0204b52a1b91187fdce77590de791dfd86d1 Dec 13 07:05:32 crc kubenswrapper[4644]: I1213 07:05:32.704090 4644 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 07:05:33 crc kubenswrapper[4644]: I1213 07:05:33.143425 4644 generic.go:334] "Generic (PLEG): container finished" podID="78c20695-df2a-4d1d-a8e8-4dc7817b5803" containerID="61481c7898055288e74bf1525bdd29ff5b8c272f887a488395b1e80cbd0b9021" exitCode=0 Dec 13 07:05:33 crc kubenswrapper[4644]: I1213 07:05:33.143707 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"78c20695-df2a-4d1d-a8e8-4dc7817b5803","Type":"ContainerDied","Data":"61481c7898055288e74bf1525bdd29ff5b8c272f887a488395b1e80cbd0b9021"} Dec 13 07:05:33 crc kubenswrapper[4644]: I1213 07:05:33.145364 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" event={"ID":"02272783-8f74-4b52-9518-cb97dcf8205b","Type":"ContainerStarted","Data":"996c5a1156d255ec8d2d4c81f64b0204b52a1b91187fdce77590de791dfd86d1"} Dec 13 07:05:34 crc kubenswrapper[4644]: I1213 07:05:34.159771 4644 generic.go:334] "Generic (PLEG): container finished" podID="43c1f8b3-1d91-4152-bf2d-be501022615a" containerID="004a0174a0a17ae58272cfce0f430771736af957b1bd360de81f2895556c453c" exitCode=0 Dec 13 07:05:34 crc kubenswrapper[4644]: I1213 07:05:34.160042 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"43c1f8b3-1d91-4152-bf2d-be501022615a","Type":"ContainerDied","Data":"004a0174a0a17ae58272cfce0f430771736af957b1bd360de81f2895556c453c"} Dec 13 07:05:34 crc kubenswrapper[4644]: I1213 07:05:34.163403 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"78c20695-df2a-4d1d-a8e8-4dc7817b5803","Type":"ContainerStarted","Data":"e165053796189b6f3318df7f68580015165ec62b1c328ae9d209ead2411a3827"} Dec 13 07:05:34 crc kubenswrapper[4644]: I1213 07:05:34.164257 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 13 07:05:34 crc kubenswrapper[4644]: I1213 07:05:34.210833 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.210798155 podStartE2EDuration="36.210798155s" podCreationTimestamp="2025-12-13 07:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:05:34.204038099 +0000 UTC m=+1196.418988932" watchObservedRunningTime="2025-12-13 07:05:34.210798155 +0000 UTC m=+1196.425748988" Dec 13 07:05:35 crc kubenswrapper[4644]: I1213 07:05:35.175292 4644 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"43c1f8b3-1d91-4152-bf2d-be501022615a","Type":"ContainerStarted","Data":"69237c9a71a491815e8bc47c7e9ee62a4cf51358e3c0b981ad2d3d23e1a7e080"} Dec 13 07:05:35 crc kubenswrapper[4644]: I1213 07:05:35.176263 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:35 crc kubenswrapper[4644]: I1213 07:05:35.198632 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.198607574 podStartE2EDuration="36.198607574s" podCreationTimestamp="2025-12-13 07:04:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:05:35.195788766 +0000 UTC m=+1197.410739609" watchObservedRunningTime="2025-12-13 07:05:35.198607574 +0000 UTC m=+1197.413558407" Dec 13 07:05:41 crc kubenswrapper[4644]: I1213 07:05:41.114375 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:05:42 crc kubenswrapper[4644]: I1213 07:05:42.235555 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" event={"ID":"02272783-8f74-4b52-9518-cb97dcf8205b","Type":"ContainerStarted","Data":"e11dc725bccd6bd6471a672de3e45f229a9da8881f4974761bbd5ce158470e84"} Dec 13 07:05:42 crc kubenswrapper[4644]: I1213 07:05:42.251019 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" podStartSLOduration=2.842782392 podStartE2EDuration="11.251004947s" podCreationTimestamp="2025-12-13 07:05:31 +0000 UTC" firstStartedPulling="2025-12-13 07:05:32.703808009 +0000 UTC m=+1194.918758842" lastFinishedPulling="2025-12-13 07:05:41.112030564 +0000 UTC m=+1203.326981397" observedRunningTime="2025-12-13 07:05:42.248540615 +0000 UTC m=+1204.463491449" watchObservedRunningTime="2025-12-13 07:05:42.251004947 +0000 UTC m=+1204.465955780" Dec 13 07:05:49 crc kubenswrapper[4644]: I1213 07:05:49.246888 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 13 07:05:50 crc kubenswrapper[4644]: I1213 07:05:50.219668 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 13 07:05:53 crc kubenswrapper[4644]: I1213 07:05:53.341368 4644 generic.go:334] "Generic (PLEG): container finished" podID="02272783-8f74-4b52-9518-cb97dcf8205b" containerID="e11dc725bccd6bd6471a672de3e45f229a9da8881f4974761bbd5ce158470e84" exitCode=0 Dec 13 07:05:53 crc kubenswrapper[4644]: I1213 07:05:53.341501 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" event={"ID":"02272783-8f74-4b52-9518-cb97dcf8205b","Type":"ContainerDied","Data":"e11dc725bccd6bd6471a672de3e45f229a9da8881f4974761bbd5ce158470e84"} Dec 13 07:05:54 crc kubenswrapper[4644]: I1213 07:05:54.680102 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" Dec 13 07:05:54 crc kubenswrapper[4644]: I1213 07:05:54.776416 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-ssh-key\") pod \"02272783-8f74-4b52-9518-cb97dcf8205b\" (UID: \"02272783-8f74-4b52-9518-cb97dcf8205b\") " Dec 13 07:05:54 crc kubenswrapper[4644]: I1213 07:05:54.776571 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-repo-setup-combined-ca-bundle\") pod \"02272783-8f74-4b52-9518-cb97dcf8205b\" (UID: \"02272783-8f74-4b52-9518-cb97dcf8205b\") " Dec 13 07:05:54 crc kubenswrapper[4644]: I1213 07:05:54.776706 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-inventory\") pod \"02272783-8f74-4b52-9518-cb97dcf8205b\" (UID: \"02272783-8f74-4b52-9518-cb97dcf8205b\") " Dec 13 07:05:54 crc kubenswrapper[4644]: I1213 07:05:54.776771 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gk6gc\" (UniqueName: \"kubernetes.io/projected/02272783-8f74-4b52-9518-cb97dcf8205b-kube-api-access-gk6gc\") pod \"02272783-8f74-4b52-9518-cb97dcf8205b\" (UID: \"02272783-8f74-4b52-9518-cb97dcf8205b\") " Dec 13 07:05:54 crc kubenswrapper[4644]: I1213 07:05:54.782431 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "02272783-8f74-4b52-9518-cb97dcf8205b" (UID: "02272783-8f74-4b52-9518-cb97dcf8205b"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:05:54 crc kubenswrapper[4644]: I1213 07:05:54.782541 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02272783-8f74-4b52-9518-cb97dcf8205b-kube-api-access-gk6gc" (OuterVolumeSpecName: "kube-api-access-gk6gc") pod "02272783-8f74-4b52-9518-cb97dcf8205b" (UID: "02272783-8f74-4b52-9518-cb97dcf8205b"). InnerVolumeSpecName "kube-api-access-gk6gc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:05:54 crc kubenswrapper[4644]: I1213 07:05:54.799605 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "02272783-8f74-4b52-9518-cb97dcf8205b" (UID: "02272783-8f74-4b52-9518-cb97dcf8205b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:05:54 crc kubenswrapper[4644]: I1213 07:05:54.801735 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-inventory" (OuterVolumeSpecName: "inventory") pod "02272783-8f74-4b52-9518-cb97dcf8205b" (UID: "02272783-8f74-4b52-9518-cb97dcf8205b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:05:54 crc kubenswrapper[4644]: I1213 07:05:54.879593 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:05:54 crc kubenswrapper[4644]: I1213 07:05:54.879631 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gk6gc\" (UniqueName: \"kubernetes.io/projected/02272783-8f74-4b52-9518-cb97dcf8205b-kube-api-access-gk6gc\") on node \"crc\" DevicePath \"\"" Dec 13 07:05:54 crc kubenswrapper[4644]: I1213 07:05:54.879641 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:05:54 crc kubenswrapper[4644]: I1213 07:05:54.879653 4644 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02272783-8f74-4b52-9518-cb97dcf8205b-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.361629 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" event={"ID":"02272783-8f74-4b52-9518-cb97dcf8205b","Type":"ContainerDied","Data":"996c5a1156d255ec8d2d4c81f64b0204b52a1b91187fdce77590de791dfd86d1"} Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.361673 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="996c5a1156d255ec8d2d4c81f64b0204b52a1b91187fdce77590de791dfd86d1" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.361735 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.424423 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz"] Dec 13 07:05:55 crc kubenswrapper[4644]: E1213 07:05:55.424914 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02272783-8f74-4b52-9518-cb97dcf8205b" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.424936 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="02272783-8f74-4b52-9518-cb97dcf8205b" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.425176 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="02272783-8f74-4b52-9518-cb97dcf8205b" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.425896 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.428926 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.429299 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.429510 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.429596 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.433060 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz"] Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.492909 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbk5n\" (UniqueName: \"kubernetes.io/projected/59121abb-74f1-43fb-ba78-29bf6cdb871e-kube-api-access-gbk5n\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz\" (UID: \"59121abb-74f1-43fb-ba78-29bf6cdb871e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.493093 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz\" (UID: \"59121abb-74f1-43fb-ba78-29bf6cdb871e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.493214 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz\" (UID: \"59121abb-74f1-43fb-ba78-29bf6cdb871e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.493376 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz\" (UID: \"59121abb-74f1-43fb-ba78-29bf6cdb871e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.596475 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz\" (UID: \"59121abb-74f1-43fb-ba78-29bf6cdb871e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.596897 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbk5n\" (UniqueName: \"kubernetes.io/projected/59121abb-74f1-43fb-ba78-29bf6cdb871e-kube-api-access-gbk5n\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz\" (UID: \"59121abb-74f1-43fb-ba78-29bf6cdb871e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.597588 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz\" (UID: \"59121abb-74f1-43fb-ba78-29bf6cdb871e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.598298 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz\" (UID: \"59121abb-74f1-43fb-ba78-29bf6cdb871e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.603105 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz\" (UID: \"59121abb-74f1-43fb-ba78-29bf6cdb871e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.603269 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz\" (UID: \"59121abb-74f1-43fb-ba78-29bf6cdb871e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.604361 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz\" (UID: \"59121abb-74f1-43fb-ba78-29bf6cdb871e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.614726 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbk5n\" (UniqueName: \"kubernetes.io/projected/59121abb-74f1-43fb-ba78-29bf6cdb871e-kube-api-access-gbk5n\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz\" (UID: \"59121abb-74f1-43fb-ba78-29bf6cdb871e\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" Dec 13 07:05:55 crc kubenswrapper[4644]: I1213 07:05:55.753138 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" Dec 13 07:05:56 crc kubenswrapper[4644]: I1213 07:05:56.223124 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz"] Dec 13 07:05:56 crc kubenswrapper[4644]: W1213 07:05:56.225577 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59121abb_74f1_43fb_ba78_29bf6cdb871e.slice/crio-293d9631717c5211e558aefa222b0355b6be1b25651717a5e3c2897803a92840 WatchSource:0}: Error finding container 293d9631717c5211e558aefa222b0355b6be1b25651717a5e3c2897803a92840: Status 404 returned error can't find the container with id 293d9631717c5211e558aefa222b0355b6be1b25651717a5e3c2897803a92840 Dec 13 07:05:56 crc kubenswrapper[4644]: I1213 07:05:56.370420 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" event={"ID":"59121abb-74f1-43fb-ba78-29bf6cdb871e","Type":"ContainerStarted","Data":"293d9631717c5211e558aefa222b0355b6be1b25651717a5e3c2897803a92840"} Dec 13 07:05:57 crc kubenswrapper[4644]: I1213 07:05:57.380466 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" event={"ID":"59121abb-74f1-43fb-ba78-29bf6cdb871e","Type":"ContainerStarted","Data":"de5b80f5be9726757ca004c2540a881265471782a5b760644d1363cb15d0bdfd"} Dec 13 07:05:57 crc kubenswrapper[4644]: I1213 07:05:57.393880 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" podStartSLOduration=1.641209199 podStartE2EDuration="2.393859448s" podCreationTimestamp="2025-12-13 07:05:55 +0000 UTC" firstStartedPulling="2025-12-13 07:05:56.228301191 +0000 UTC m=+1218.443252025" lastFinishedPulling="2025-12-13 07:05:56.980951441 +0000 UTC m=+1219.195902274" observedRunningTime="2025-12-13 07:05:57.393347255 +0000 UTC m=+1219.608298098" watchObservedRunningTime="2025-12-13 07:05:57.393859448 +0000 UTC m=+1219.608810281" Dec 13 07:07:09 crc kubenswrapper[4644]: I1213 07:07:09.753300 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:07:09 crc kubenswrapper[4644]: I1213 07:07:09.753688 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:07:39 crc kubenswrapper[4644]: I1213 07:07:39.754020 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:07:39 crc kubenswrapper[4644]: I1213 07:07:39.754581 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:07:41 crc kubenswrapper[4644]: I1213 07:07:41.134502 4644 scope.go:117] "RemoveContainer" containerID="850a847c976fc9bdf5b83ca9b3d7d9183b75eba17920b6b7642af80e5d0fe15c" Dec 13 07:07:41 crc kubenswrapper[4644]: I1213 07:07:41.179492 4644 scope.go:117] "RemoveContainer" containerID="14d0d1d27761cb9349c57dbc6743ac185316082442fe023a224d9d2347d9978f" Dec 13 07:08:09 crc kubenswrapper[4644]: I1213 07:08:09.753489 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:08:09 crc kubenswrapper[4644]: I1213 07:08:09.754355 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:08:09 crc kubenswrapper[4644]: I1213 07:08:09.754433 4644 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 07:08:09 crc kubenswrapper[4644]: I1213 07:08:09.755424 4644 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c27c345ea5c1252e6754fce190bc5091a29be12a2dec59380a2edf28ec34cb52"} pod="openshift-machine-config-operator/machine-config-daemon-45tj4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 07:08:09 crc kubenswrapper[4644]: I1213 07:08:09.755515 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" containerID="cri-o://c27c345ea5c1252e6754fce190bc5091a29be12a2dec59380a2edf28ec34cb52" gracePeriod=600 Dec 13 07:08:10 crc kubenswrapper[4644]: I1213 07:08:10.360390 4644 generic.go:334] "Generic (PLEG): container finished" podID="48240f19-087e-4597-b448-ab1a190a5027" containerID="c27c345ea5c1252e6754fce190bc5091a29be12a2dec59380a2edf28ec34cb52" exitCode=0 Dec 13 07:08:10 crc kubenswrapper[4644]: I1213 07:08:10.360433 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerDied","Data":"c27c345ea5c1252e6754fce190bc5091a29be12a2dec59380a2edf28ec34cb52"} Dec 13 07:08:10 crc kubenswrapper[4644]: I1213 07:08:10.360478 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerStarted","Data":"5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130"} Dec 13 07:08:10 crc kubenswrapper[4644]: I1213 07:08:10.360495 4644 scope.go:117] "RemoveContainer" containerID="e41b9eff015e4f806befa3edcbb8d5a320f89b209722141f8d73fdf31b010f61" Dec 13 07:08:41 crc kubenswrapper[4644]: I1213 07:08:41.265229 4644 scope.go:117] "RemoveContainer" containerID="26ca95e03e55f22c0404cf6fee0e21bee0f31128ea90589eada6a053eed352e9" Dec 13 07:08:41 crc kubenswrapper[4644]: 
I1213 07:08:41.293013 4644 scope.go:117] "RemoveContainer" containerID="05f6ca39a0251b40d12afc00d55c4ec5e4fc967000cd617e5ba52ce8830e779d" Dec 13 07:09:19 crc kubenswrapper[4644]: I1213 07:09:19.879377 4644 generic.go:334] "Generic (PLEG): container finished" podID="59121abb-74f1-43fb-ba78-29bf6cdb871e" containerID="de5b80f5be9726757ca004c2540a881265471782a5b760644d1363cb15d0bdfd" exitCode=0 Dec 13 07:09:19 crc kubenswrapper[4644]: I1213 07:09:19.879470 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" event={"ID":"59121abb-74f1-43fb-ba78-29bf6cdb871e","Type":"ContainerDied","Data":"de5b80f5be9726757ca004c2540a881265471782a5b760644d1363cb15d0bdfd"} Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.214458 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.308164 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-inventory\") pod \"59121abb-74f1-43fb-ba78-29bf6cdb871e\" (UID: \"59121abb-74f1-43fb-ba78-29bf6cdb871e\") " Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.308365 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbk5n\" (UniqueName: \"kubernetes.io/projected/59121abb-74f1-43fb-ba78-29bf6cdb871e-kube-api-access-gbk5n\") pod \"59121abb-74f1-43fb-ba78-29bf6cdb871e\" (UID: \"59121abb-74f1-43fb-ba78-29bf6cdb871e\") " Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.308471 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-ssh-key\") pod \"59121abb-74f1-43fb-ba78-29bf6cdb871e\" (UID: \"59121abb-74f1-43fb-ba78-29bf6cdb871e\") " Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.308543 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-bootstrap-combined-ca-bundle\") pod \"59121abb-74f1-43fb-ba78-29bf6cdb871e\" (UID: \"59121abb-74f1-43fb-ba78-29bf6cdb871e\") " Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.314040 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "59121abb-74f1-43fb-ba78-29bf6cdb871e" (UID: "59121abb-74f1-43fb-ba78-29bf6cdb871e"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.314369 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59121abb-74f1-43fb-ba78-29bf6cdb871e-kube-api-access-gbk5n" (OuterVolumeSpecName: "kube-api-access-gbk5n") pod "59121abb-74f1-43fb-ba78-29bf6cdb871e" (UID: "59121abb-74f1-43fb-ba78-29bf6cdb871e"). InnerVolumeSpecName "kube-api-access-gbk5n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.331091 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "59121abb-74f1-43fb-ba78-29bf6cdb871e" (UID: "59121abb-74f1-43fb-ba78-29bf6cdb871e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.331117 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-inventory" (OuterVolumeSpecName: "inventory") pod "59121abb-74f1-43fb-ba78-29bf6cdb871e" (UID: "59121abb-74f1-43fb-ba78-29bf6cdb871e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.411500 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbk5n\" (UniqueName: \"kubernetes.io/projected/59121abb-74f1-43fb-ba78-29bf6cdb871e-kube-api-access-gbk5n\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.411533 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.411543 4644 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.411553 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/59121abb-74f1-43fb-ba78-29bf6cdb871e-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.897115 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" event={"ID":"59121abb-74f1-43fb-ba78-29bf6cdb871e","Type":"ContainerDied","Data":"293d9631717c5211e558aefa222b0355b6be1b25651717a5e3c2897803a92840"} Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.897155 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.897164 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="293d9631717c5211e558aefa222b0355b6be1b25651717a5e3c2897803a92840" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.964820 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n"] Dec 13 07:09:21 crc kubenswrapper[4644]: E1213 07:09:21.965311 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59121abb-74f1-43fb-ba78-29bf6cdb871e" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.965337 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="59121abb-74f1-43fb-ba78-29bf6cdb871e" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.965556 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="59121abb-74f1-43fb-ba78-29bf6cdb871e" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.966210 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.969005 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.969167 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.969167 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.969177 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:09:21 crc kubenswrapper[4644]: I1213 07:09:21.975068 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n"] Dec 13 07:09:22 crc kubenswrapper[4644]: I1213 07:09:22.023743 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c87e60d6-1b2d-47eb-99ae-543b7b454813-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nd86n\" (UID: \"c87e60d6-1b2d-47eb-99ae-543b7b454813\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" Dec 13 07:09:22 crc kubenswrapper[4644]: I1213 07:09:22.024118 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzgw9\" (UniqueName: \"kubernetes.io/projected/c87e60d6-1b2d-47eb-99ae-543b7b454813-kube-api-access-tzgw9\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nd86n\" (UID: \"c87e60d6-1b2d-47eb-99ae-543b7b454813\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" Dec 13 07:09:22 crc kubenswrapper[4644]: I1213 07:09:22.024350 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c87e60d6-1b2d-47eb-99ae-543b7b454813-ssh-key\") pod 
\"configure-network-edpm-deployment-openstack-edpm-ipam-nd86n\" (UID: \"c87e60d6-1b2d-47eb-99ae-543b7b454813\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" Dec 13 07:09:22 crc kubenswrapper[4644]: I1213 07:09:22.126549 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c87e60d6-1b2d-47eb-99ae-543b7b454813-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nd86n\" (UID: \"c87e60d6-1b2d-47eb-99ae-543b7b454813\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" Dec 13 07:09:22 crc kubenswrapper[4644]: I1213 07:09:22.126661 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzgw9\" (UniqueName: \"kubernetes.io/projected/c87e60d6-1b2d-47eb-99ae-543b7b454813-kube-api-access-tzgw9\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nd86n\" (UID: \"c87e60d6-1b2d-47eb-99ae-543b7b454813\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" Dec 13 07:09:22 crc kubenswrapper[4644]: I1213 07:09:22.126811 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c87e60d6-1b2d-47eb-99ae-543b7b454813-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nd86n\" (UID: \"c87e60d6-1b2d-47eb-99ae-543b7b454813\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" Dec 13 07:09:22 crc kubenswrapper[4644]: I1213 07:09:22.130871 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c87e60d6-1b2d-47eb-99ae-543b7b454813-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nd86n\" (UID: \"c87e60d6-1b2d-47eb-99ae-543b7b454813\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" Dec 13 07:09:22 crc kubenswrapper[4644]: I1213 07:09:22.131228 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c87e60d6-1b2d-47eb-99ae-543b7b454813-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nd86n\" (UID: \"c87e60d6-1b2d-47eb-99ae-543b7b454813\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" Dec 13 07:09:22 crc kubenswrapper[4644]: I1213 07:09:22.140409 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzgw9\" (UniqueName: \"kubernetes.io/projected/c87e60d6-1b2d-47eb-99ae-543b7b454813-kube-api-access-tzgw9\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nd86n\" (UID: \"c87e60d6-1b2d-47eb-99ae-543b7b454813\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" Dec 13 07:09:22 crc kubenswrapper[4644]: I1213 07:09:22.285113 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" Dec 13 07:09:22 crc kubenswrapper[4644]: I1213 07:09:22.743900 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n"] Dec 13 07:09:22 crc kubenswrapper[4644]: I1213 07:09:22.904958 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" event={"ID":"c87e60d6-1b2d-47eb-99ae-543b7b454813","Type":"ContainerStarted","Data":"543fd080feed5451ed4d46230c29fc530d6410819326b884069bcbe16334228c"} Dec 13 07:09:23 crc kubenswrapper[4644]: I1213 07:09:23.914864 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" event={"ID":"c87e60d6-1b2d-47eb-99ae-543b7b454813","Type":"ContainerStarted","Data":"d2e3dfc53e3a26f147eba9af7aeaa67176249426fdb0942a483d0d44443c4e13"} Dec 13 07:09:23 crc kubenswrapper[4644]: I1213 07:09:23.938386 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" podStartSLOduration=2.345837334 podStartE2EDuration="2.938368553s" podCreationTimestamp="2025-12-13 07:09:21 +0000 UTC" firstStartedPulling="2025-12-13 07:09:22.745146182 +0000 UTC m=+1424.960097015" lastFinishedPulling="2025-12-13 07:09:23.337677401 +0000 UTC m=+1425.552628234" observedRunningTime="2025-12-13 07:09:23.932224541 +0000 UTC m=+1426.147175375" watchObservedRunningTime="2025-12-13 07:09:23.938368553 +0000 UTC m=+1426.153319385" Dec 13 07:10:26 crc kubenswrapper[4644]: I1213 07:10:26.405810 4644 generic.go:334] "Generic (PLEG): container finished" podID="c87e60d6-1b2d-47eb-99ae-543b7b454813" containerID="d2e3dfc53e3a26f147eba9af7aeaa67176249426fdb0942a483d0d44443c4e13" exitCode=0 Dec 13 07:10:26 crc kubenswrapper[4644]: I1213 07:10:26.405882 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" event={"ID":"c87e60d6-1b2d-47eb-99ae-543b7b454813","Type":"ContainerDied","Data":"d2e3dfc53e3a26f147eba9af7aeaa67176249426fdb0942a483d0d44443c4e13"} Dec 13 07:10:27 crc kubenswrapper[4644]: I1213 07:10:27.754002 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" Dec 13 07:10:27 crc kubenswrapper[4644]: I1213 07:10:27.809556 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c87e60d6-1b2d-47eb-99ae-543b7b454813-inventory\") pod \"c87e60d6-1b2d-47eb-99ae-543b7b454813\" (UID: \"c87e60d6-1b2d-47eb-99ae-543b7b454813\") " Dec 13 07:10:27 crc kubenswrapper[4644]: I1213 07:10:27.809697 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzgw9\" (UniqueName: \"kubernetes.io/projected/c87e60d6-1b2d-47eb-99ae-543b7b454813-kube-api-access-tzgw9\") pod \"c87e60d6-1b2d-47eb-99ae-543b7b454813\" (UID: \"c87e60d6-1b2d-47eb-99ae-543b7b454813\") " Dec 13 07:10:27 crc kubenswrapper[4644]: I1213 07:10:27.809783 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c87e60d6-1b2d-47eb-99ae-543b7b454813-ssh-key\") pod \"c87e60d6-1b2d-47eb-99ae-543b7b454813\" (UID: \"c87e60d6-1b2d-47eb-99ae-543b7b454813\") " Dec 13 07:10:27 crc kubenswrapper[4644]: I1213 07:10:27.815914 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c87e60d6-1b2d-47eb-99ae-543b7b454813-kube-api-access-tzgw9" (OuterVolumeSpecName: "kube-api-access-tzgw9") pod "c87e60d6-1b2d-47eb-99ae-543b7b454813" (UID: "c87e60d6-1b2d-47eb-99ae-543b7b454813"). InnerVolumeSpecName "kube-api-access-tzgw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:10:27 crc kubenswrapper[4644]: I1213 07:10:27.833243 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c87e60d6-1b2d-47eb-99ae-543b7b454813-inventory" (OuterVolumeSpecName: "inventory") pod "c87e60d6-1b2d-47eb-99ae-543b7b454813" (UID: "c87e60d6-1b2d-47eb-99ae-543b7b454813"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:10:27 crc kubenswrapper[4644]: I1213 07:10:27.834435 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c87e60d6-1b2d-47eb-99ae-543b7b454813-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c87e60d6-1b2d-47eb-99ae-543b7b454813" (UID: "c87e60d6-1b2d-47eb-99ae-543b7b454813"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:10:27 crc kubenswrapper[4644]: I1213 07:10:27.913225 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c87e60d6-1b2d-47eb-99ae-543b7b454813-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:10:27 crc kubenswrapper[4644]: I1213 07:10:27.913273 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzgw9\" (UniqueName: \"kubernetes.io/projected/c87e60d6-1b2d-47eb-99ae-543b7b454813-kube-api-access-tzgw9\") on node \"crc\" DevicePath \"\"" Dec 13 07:10:27 crc kubenswrapper[4644]: I1213 07:10:27.913284 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c87e60d6-1b2d-47eb-99ae-543b7b454813-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.421400 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" event={"ID":"c87e60d6-1b2d-47eb-99ae-543b7b454813","Type":"ContainerDied","Data":"543fd080feed5451ed4d46230c29fc530d6410819326b884069bcbe16334228c"} Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.421474 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.421481 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="543fd080feed5451ed4d46230c29fc530d6410819326b884069bcbe16334228c" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.489528 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t"] Dec 13 07:10:28 crc kubenswrapper[4644]: E1213 07:10:28.490185 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c87e60d6-1b2d-47eb-99ae-543b7b454813" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.490312 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="c87e60d6-1b2d-47eb-99ae-543b7b454813" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.490621 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="c87e60d6-1b2d-47eb-99ae-543b7b454813" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.491333 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.494673 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.495275 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.495803 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.495851 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.501600 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t"] Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.626778 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5kgh\" (UniqueName: \"kubernetes.io/projected/34b9af82-a9c7-4c74-a36c-4205d3ff4427-kube-api-access-q5kgh\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t\" (UID: \"34b9af82-a9c7-4c74-a36c-4205d3ff4427\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.626849 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34b9af82-a9c7-4c74-a36c-4205d3ff4427-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t\" (UID: \"34b9af82-a9c7-4c74-a36c-4205d3ff4427\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.627021 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34b9af82-a9c7-4c74-a36c-4205d3ff4427-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t\" (UID: \"34b9af82-a9c7-4c74-a36c-4205d3ff4427\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.729887 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5kgh\" (UniqueName: \"kubernetes.io/projected/34b9af82-a9c7-4c74-a36c-4205d3ff4427-kube-api-access-q5kgh\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t\" (UID: \"34b9af82-a9c7-4c74-a36c-4205d3ff4427\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.729963 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34b9af82-a9c7-4c74-a36c-4205d3ff4427-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t\" (UID: \"34b9af82-a9c7-4c74-a36c-4205d3ff4427\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.730005 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34b9af82-a9c7-4c74-a36c-4205d3ff4427-ssh-key\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t\" (UID: \"34b9af82-a9c7-4c74-a36c-4205d3ff4427\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.734041 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34b9af82-a9c7-4c74-a36c-4205d3ff4427-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t\" (UID: \"34b9af82-a9c7-4c74-a36c-4205d3ff4427\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.734056 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34b9af82-a9c7-4c74-a36c-4205d3ff4427-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t\" (UID: \"34b9af82-a9c7-4c74-a36c-4205d3ff4427\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.745246 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5kgh\" (UniqueName: \"kubernetes.io/projected/34b9af82-a9c7-4c74-a36c-4205d3ff4427-kube-api-access-q5kgh\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t\" (UID: \"34b9af82-a9c7-4c74-a36c-4205d3ff4427\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" Dec 13 07:10:28 crc kubenswrapper[4644]: I1213 07:10:28.806537 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" Dec 13 07:10:29 crc kubenswrapper[4644]: I1213 07:10:29.260665 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t"] Dec 13 07:10:29 crc kubenswrapper[4644]: I1213 07:10:29.429121 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" event={"ID":"34b9af82-a9c7-4c74-a36c-4205d3ff4427","Type":"ContainerStarted","Data":"d0dbcf9919b66f8d3c6905205157d1dc9c1cc5f36c2b4190987d64f18a1bae8f"} Dec 13 07:10:30 crc kubenswrapper[4644]: I1213 07:10:30.437631 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" event={"ID":"34b9af82-a9c7-4c74-a36c-4205d3ff4427","Type":"ContainerStarted","Data":"ebd2ac61733526bd5fd4823be477d0cb2a2ad20c05024f32ee7325ef587c24c8"} Dec 13 07:10:30 crc kubenswrapper[4644]: I1213 07:10:30.458791 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" podStartSLOduration=1.886527139 podStartE2EDuration="2.458771759s" podCreationTimestamp="2025-12-13 07:10:28 +0000 UTC" firstStartedPulling="2025-12-13 07:10:29.26670946 +0000 UTC m=+1491.481660293" lastFinishedPulling="2025-12-13 07:10:29.83895408 +0000 UTC m=+1492.053904913" observedRunningTime="2025-12-13 07:10:30.450864193 +0000 UTC m=+1492.665815025" watchObservedRunningTime="2025-12-13 07:10:30.458771759 +0000 UTC m=+1492.673722592" Dec 13 07:10:34 crc kubenswrapper[4644]: I1213 07:10:34.471013 4644 generic.go:334] "Generic (PLEG): container finished" podID="34b9af82-a9c7-4c74-a36c-4205d3ff4427" containerID="ebd2ac61733526bd5fd4823be477d0cb2a2ad20c05024f32ee7325ef587c24c8" exitCode=0 Dec 13 07:10:34 crc kubenswrapper[4644]: I1213 
07:10:34.471091 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" event={"ID":"34b9af82-a9c7-4c74-a36c-4205d3ff4427","Type":"ContainerDied","Data":"ebd2ac61733526bd5fd4823be477d0cb2a2ad20c05024f32ee7325ef587c24c8"} Dec 13 07:10:35 crc kubenswrapper[4644]: I1213 07:10:35.804022 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" Dec 13 07:10:35 crc kubenswrapper[4644]: I1213 07:10:35.973970 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q5kgh\" (UniqueName: \"kubernetes.io/projected/34b9af82-a9c7-4c74-a36c-4205d3ff4427-kube-api-access-q5kgh\") pod \"34b9af82-a9c7-4c74-a36c-4205d3ff4427\" (UID: \"34b9af82-a9c7-4c74-a36c-4205d3ff4427\") " Dec 13 07:10:35 crc kubenswrapper[4644]: I1213 07:10:35.974051 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34b9af82-a9c7-4c74-a36c-4205d3ff4427-inventory\") pod \"34b9af82-a9c7-4c74-a36c-4205d3ff4427\" (UID: \"34b9af82-a9c7-4c74-a36c-4205d3ff4427\") " Dec 13 07:10:35 crc kubenswrapper[4644]: I1213 07:10:35.974104 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34b9af82-a9c7-4c74-a36c-4205d3ff4427-ssh-key\") pod \"34b9af82-a9c7-4c74-a36c-4205d3ff4427\" (UID: \"34b9af82-a9c7-4c74-a36c-4205d3ff4427\") " Dec 13 07:10:35 crc kubenswrapper[4644]: I1213 07:10:35.980237 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34b9af82-a9c7-4c74-a36c-4205d3ff4427-kube-api-access-q5kgh" (OuterVolumeSpecName: "kube-api-access-q5kgh") pod "34b9af82-a9c7-4c74-a36c-4205d3ff4427" (UID: "34b9af82-a9c7-4c74-a36c-4205d3ff4427"). InnerVolumeSpecName "kube-api-access-q5kgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:10:35 crc kubenswrapper[4644]: I1213 07:10:35.998714 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34b9af82-a9c7-4c74-a36c-4205d3ff4427-inventory" (OuterVolumeSpecName: "inventory") pod "34b9af82-a9c7-4c74-a36c-4205d3ff4427" (UID: "34b9af82-a9c7-4c74-a36c-4205d3ff4427"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:10:35 crc kubenswrapper[4644]: I1213 07:10:35.998954 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34b9af82-a9c7-4c74-a36c-4205d3ff4427-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "34b9af82-a9c7-4c74-a36c-4205d3ff4427" (UID: "34b9af82-a9c7-4c74-a36c-4205d3ff4427"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.076352 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q5kgh\" (UniqueName: \"kubernetes.io/projected/34b9af82-a9c7-4c74-a36c-4205d3ff4427-kube-api-access-q5kgh\") on node \"crc\" DevicePath \"\"" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.076380 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34b9af82-a9c7-4c74-a36c-4205d3ff4427-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.076394 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34b9af82-a9c7-4c74-a36c-4205d3ff4427-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.489117 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" event={"ID":"34b9af82-a9c7-4c74-a36c-4205d3ff4427","Type":"ContainerDied","Data":"d0dbcf9919b66f8d3c6905205157d1dc9c1cc5f36c2b4190987d64f18a1bae8f"} Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.489161 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0dbcf9919b66f8d3c6905205157d1dc9c1cc5f36c2b4190987d64f18a1bae8f" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.489566 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.542288 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr"] Dec 13 07:10:36 crc kubenswrapper[4644]: E1213 07:10:36.542755 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34b9af82-a9c7-4c74-a36c-4205d3ff4427" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.542779 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="34b9af82-a9c7-4c74-a36c-4205d3ff4427" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.543036 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="34b9af82-a9c7-4c74-a36c-4205d3ff4427" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.543652 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.545694 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.546203 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.547078 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.547726 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.551122 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr"] Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.687169 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b444632-d889-418a-83b5-2ce9234107a5-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpspr\" (UID: \"1b444632-d889-418a-83b5-2ce9234107a5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.687216 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1b444632-d889-418a-83b5-2ce9234107a5-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpspr\" (UID: \"1b444632-d889-418a-83b5-2ce9234107a5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.687358 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5z75\" (UniqueName: \"kubernetes.io/projected/1b444632-d889-418a-83b5-2ce9234107a5-kube-api-access-w5z75\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpspr\" (UID: \"1b444632-d889-418a-83b5-2ce9234107a5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.789687 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b444632-d889-418a-83b5-2ce9234107a5-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpspr\" (UID: \"1b444632-d889-418a-83b5-2ce9234107a5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.789753 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1b444632-d889-418a-83b5-2ce9234107a5-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpspr\" (UID: \"1b444632-d889-418a-83b5-2ce9234107a5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.789923 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5z75\" (UniqueName: \"kubernetes.io/projected/1b444632-d889-418a-83b5-2ce9234107a5-kube-api-access-w5z75\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpspr\" (UID: 
\"1b444632-d889-418a-83b5-2ce9234107a5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.794530 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b444632-d889-418a-83b5-2ce9234107a5-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpspr\" (UID: \"1b444632-d889-418a-83b5-2ce9234107a5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.794536 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1b444632-d889-418a-83b5-2ce9234107a5-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpspr\" (UID: \"1b444632-d889-418a-83b5-2ce9234107a5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.805589 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5z75\" (UniqueName: \"kubernetes.io/projected/1b444632-d889-418a-83b5-2ce9234107a5-kube-api-access-w5z75\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dpspr\" (UID: \"1b444632-d889-418a-83b5-2ce9234107a5\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" Dec 13 07:10:36 crc kubenswrapper[4644]: I1213 07:10:36.859855 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" Dec 13 07:10:37 crc kubenswrapper[4644]: I1213 07:10:37.320677 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr"] Dec 13 07:10:37 crc kubenswrapper[4644]: I1213 07:10:37.330158 4644 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 07:10:37 crc kubenswrapper[4644]: I1213 07:10:37.499667 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" event={"ID":"1b444632-d889-418a-83b5-2ce9234107a5","Type":"ContainerStarted","Data":"03167bfc40822117f0cde2be98f1549b1c26d1d6393575868cfce925562d68dd"} Dec 13 07:10:38 crc kubenswrapper[4644]: I1213 07:10:38.510111 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" event={"ID":"1b444632-d889-418a-83b5-2ce9234107a5","Type":"ContainerStarted","Data":"54aa967c5a507027dd9d70a787419415934af769780674e2094fbd9d52a2c130"} Dec 13 07:10:38 crc kubenswrapper[4644]: I1213 07:10:38.530681 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" podStartSLOduration=1.847273827 podStartE2EDuration="2.530661312s" podCreationTimestamp="2025-12-13 07:10:36 +0000 UTC" firstStartedPulling="2025-12-13 07:10:37.329903074 +0000 UTC m=+1499.544853907" lastFinishedPulling="2025-12-13 07:10:38.013290558 +0000 UTC m=+1500.228241392" observedRunningTime="2025-12-13 07:10:38.522057577 +0000 UTC m=+1500.737008409" watchObservedRunningTime="2025-12-13 07:10:38.530661312 +0000 UTC m=+1500.745612145" Dec 13 07:10:39 crc kubenswrapper[4644]: I1213 07:10:39.753695 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:10:39 crc kubenswrapper[4644]: I1213 07:10:39.754032 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:10:52 crc kubenswrapper[4644]: I1213 07:10:52.033942 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-f342-account-create-update-nlr8s"] Dec 13 07:10:52 crc kubenswrapper[4644]: I1213 07:10:52.042293 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-5m9w2"] Dec 13 07:10:52 crc kubenswrapper[4644]: I1213 07:10:52.049403 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-f342-account-create-update-nlr8s"] Dec 13 07:10:52 crc kubenswrapper[4644]: I1213 07:10:52.055567 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-5m9w2"] Dec 13 07:10:52 crc kubenswrapper[4644]: I1213 07:10:52.399504 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="426e7525-babf-4c8e-aa46-e8bfa39968f1" path="/var/lib/kubelet/pods/426e7525-babf-4c8e-aa46-e8bfa39968f1/volumes" Dec 13 07:10:52 crc kubenswrapper[4644]: I1213 07:10:52.400288 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76fa9df7-1c14-4734-94ba-8dc4d4b64ad3" path="/var/lib/kubelet/pods/76fa9df7-1c14-4734-94ba-8dc4d4b64ad3/volumes" Dec 13 07:10:53 crc kubenswrapper[4644]: I1213 07:10:53.032750 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-xfcdr"] Dec 13 07:10:53 crc kubenswrapper[4644]: I1213 07:10:53.040355 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-c77f-account-create-update-8jqkn"] Dec 13 07:10:53 crc kubenswrapper[4644]: I1213 07:10:53.048011 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-xfcdr"] Dec 13 07:10:53 crc kubenswrapper[4644]: I1213 07:10:53.060776 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-c77f-account-create-update-8jqkn"] Dec 13 07:10:54 crc kubenswrapper[4644]: I1213 07:10:54.397939 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17497e45-c9ee-486a-a743-651b0447f79d" path="/var/lib/kubelet/pods/17497e45-c9ee-486a-a743-651b0447f79d/volumes" Dec 13 07:10:54 crc kubenswrapper[4644]: I1213 07:10:54.398774 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c1b1c7c-3447-400f-a82e-0d6a130b815f" path="/var/lib/kubelet/pods/7c1b1c7c-3447-400f-a82e-0d6a130b815f/volumes" Dec 13 07:10:58 crc kubenswrapper[4644]: I1213 07:10:58.038972 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-wdksf"] Dec 13 07:10:58 crc kubenswrapper[4644]: I1213 07:10:58.046212 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-651a-account-create-update-v45dv"] Dec 13 07:10:58 crc kubenswrapper[4644]: I1213 07:10:58.053140 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-wdksf"] Dec 13 07:10:58 crc kubenswrapper[4644]: I1213 07:10:58.058360 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-651a-account-create-update-v45dv"] Dec 13 07:10:58 crc kubenswrapper[4644]: 
I1213 07:10:58.398862 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da31d5ac-6b38-41e4-a35f-8f627e375cc7" path="/var/lib/kubelet/pods/da31d5ac-6b38-41e4-a35f-8f627e375cc7/volumes" Dec 13 07:10:58 crc kubenswrapper[4644]: I1213 07:10:58.399591 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f169b2c3-efe1-46c6-bfdb-7a30052dbc20" path="/var/lib/kubelet/pods/f169b2c3-efe1-46c6-bfdb-7a30052dbc20/volumes" Dec 13 07:11:06 crc kubenswrapper[4644]: I1213 07:11:06.720439 4644 generic.go:334] "Generic (PLEG): container finished" podID="1b444632-d889-418a-83b5-2ce9234107a5" containerID="54aa967c5a507027dd9d70a787419415934af769780674e2094fbd9d52a2c130" exitCode=0 Dec 13 07:11:06 crc kubenswrapper[4644]: I1213 07:11:06.720492 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" event={"ID":"1b444632-d889-418a-83b5-2ce9234107a5","Type":"ContainerDied","Data":"54aa967c5a507027dd9d70a787419415934af769780674e2094fbd9d52a2c130"} Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.098969 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.117203 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b444632-d889-418a-83b5-2ce9234107a5-inventory\") pod \"1b444632-d889-418a-83b5-2ce9234107a5\" (UID: \"1b444632-d889-418a-83b5-2ce9234107a5\") " Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.117294 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5z75\" (UniqueName: \"kubernetes.io/projected/1b444632-d889-418a-83b5-2ce9234107a5-kube-api-access-w5z75\") pod \"1b444632-d889-418a-83b5-2ce9234107a5\" (UID: \"1b444632-d889-418a-83b5-2ce9234107a5\") " Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.117334 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1b444632-d889-418a-83b5-2ce9234107a5-ssh-key\") pod \"1b444632-d889-418a-83b5-2ce9234107a5\" (UID: \"1b444632-d889-418a-83b5-2ce9234107a5\") " Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.129629 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b444632-d889-418a-83b5-2ce9234107a5-kube-api-access-w5z75" (OuterVolumeSpecName: "kube-api-access-w5z75") pod "1b444632-d889-418a-83b5-2ce9234107a5" (UID: "1b444632-d889-418a-83b5-2ce9234107a5"). InnerVolumeSpecName "kube-api-access-w5z75". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.167539 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b444632-d889-418a-83b5-2ce9234107a5-inventory" (OuterVolumeSpecName: "inventory") pod "1b444632-d889-418a-83b5-2ce9234107a5" (UID: "1b444632-d889-418a-83b5-2ce9234107a5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.181599 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b444632-d889-418a-83b5-2ce9234107a5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1b444632-d889-418a-83b5-2ce9234107a5" (UID: "1b444632-d889-418a-83b5-2ce9234107a5"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.223114 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1b444632-d889-418a-83b5-2ce9234107a5-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.223154 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1b444632-d889-418a-83b5-2ce9234107a5-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.223168 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5z75\" (UniqueName: \"kubernetes.io/projected/1b444632-d889-418a-83b5-2ce9234107a5-kube-api-access-w5z75\") on node \"crc\" DevicePath \"\"" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.739528 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" event={"ID":"1b444632-d889-418a-83b5-2ce9234107a5","Type":"ContainerDied","Data":"03167bfc40822117f0cde2be98f1549b1c26d1d6393575868cfce925562d68dd"} Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.739863 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="03167bfc40822117f0cde2be98f1549b1c26d1d6393575868cfce925562d68dd" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.739611 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.815226 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4"] Dec 13 07:11:08 crc kubenswrapper[4644]: E1213 07:11:08.815760 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b444632-d889-418a-83b5-2ce9234107a5" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.815786 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b444632-d889-418a-83b5-2ce9234107a5" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.815980 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b444632-d889-418a-83b5-2ce9234107a5" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.816665 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.818241 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.819358 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.819726 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.819854 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.827425 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4"] Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.935921 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4\" (UID: \"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.936107 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4\" (UID: \"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" Dec 13 07:11:08 crc kubenswrapper[4644]: I1213 07:11:08.936354 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9bhd\" (UniqueName: \"kubernetes.io/projected/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-kube-api-access-t9bhd\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4\" (UID: \"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" Dec 13 07:11:09 crc kubenswrapper[4644]: I1213 07:11:09.038865 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4\" (UID: \"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" Dec 13 07:11:09 crc kubenswrapper[4644]: I1213 07:11:09.038997 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4\" (UID: \"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" Dec 13 07:11:09 crc kubenswrapper[4644]: I1213 07:11:09.039097 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9bhd\" (UniqueName: \"kubernetes.io/projected/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-kube-api-access-t9bhd\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4\" 
(UID: \"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" Dec 13 07:11:09 crc kubenswrapper[4644]: I1213 07:11:09.043187 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4\" (UID: \"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" Dec 13 07:11:09 crc kubenswrapper[4644]: I1213 07:11:09.046669 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4\" (UID: \"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" Dec 13 07:11:09 crc kubenswrapper[4644]: I1213 07:11:09.055275 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9bhd\" (UniqueName: \"kubernetes.io/projected/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-kube-api-access-t9bhd\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4\" (UID: \"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" Dec 13 07:11:09 crc kubenswrapper[4644]: I1213 07:11:09.131726 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" Dec 13 07:11:09 crc kubenswrapper[4644]: I1213 07:11:09.606985 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4"] Dec 13 07:11:09 crc kubenswrapper[4644]: I1213 07:11:09.748662 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" event={"ID":"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4","Type":"ContainerStarted","Data":"76b31f818a3e3c23e0366b18ef58725c594421fba05bebe2bfe21d1c2768637c"} Dec 13 07:11:09 crc kubenswrapper[4644]: I1213 07:11:09.754285 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:11:09 crc kubenswrapper[4644]: I1213 07:11:09.754328 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:11:10 crc kubenswrapper[4644]: I1213 07:11:10.757376 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" event={"ID":"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4","Type":"ContainerStarted","Data":"6610b48084c0e3fa49e04d527357b0a08d8fb3977199d989b768d473c4e596eb"} Dec 13 07:11:10 crc kubenswrapper[4644]: I1213 07:11:10.772730 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" podStartSLOduration=2.268339043 podStartE2EDuration="2.772705838s" podCreationTimestamp="2025-12-13 07:11:08 +0000 
UTC" firstStartedPulling="2025-12-13 07:11:09.619362712 +0000 UTC m=+1531.834313546" lastFinishedPulling="2025-12-13 07:11:10.123729508 +0000 UTC m=+1532.338680341" observedRunningTime="2025-12-13 07:11:10.771585702 +0000 UTC m=+1532.986536535" watchObservedRunningTime="2025-12-13 07:11:10.772705838 +0000 UTC m=+1532.987656670" Dec 13 07:11:13 crc kubenswrapper[4644]: I1213 07:11:13.783183 4644 generic.go:334] "Generic (PLEG): container finished" podID="fe9da418-f306-4918-a4f5-9c8b0b9ebfe4" containerID="6610b48084c0e3fa49e04d527357b0a08d8fb3977199d989b768d473c4e596eb" exitCode=0 Dec 13 07:11:13 crc kubenswrapper[4644]: I1213 07:11:13.783290 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" event={"ID":"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4","Type":"ContainerDied","Data":"6610b48084c0e3fa49e04d527357b0a08d8fb3977199d989b768d473c4e596eb"} Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.154861 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.254012 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-ssh-key\") pod \"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4\" (UID: \"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4\") " Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.254077 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-inventory\") pod \"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4\" (UID: \"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4\") " Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.254341 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9bhd\" (UniqueName: \"kubernetes.io/projected/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-kube-api-access-t9bhd\") pod \"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4\" (UID: \"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4\") " Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.259058 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-kube-api-access-t9bhd" (OuterVolumeSpecName: "kube-api-access-t9bhd") pod "fe9da418-f306-4918-a4f5-9c8b0b9ebfe4" (UID: "fe9da418-f306-4918-a4f5-9c8b0b9ebfe4"). InnerVolumeSpecName "kube-api-access-t9bhd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.275352 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-inventory" (OuterVolumeSpecName: "inventory") pod "fe9da418-f306-4918-a4f5-9c8b0b9ebfe4" (UID: "fe9da418-f306-4918-a4f5-9c8b0b9ebfe4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.276127 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fe9da418-f306-4918-a4f5-9c8b0b9ebfe4" (UID: "fe9da418-f306-4918-a4f5-9c8b0b9ebfe4"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.357296 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9bhd\" (UniqueName: \"kubernetes.io/projected/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-kube-api-access-t9bhd\") on node \"crc\" DevicePath \"\"" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.357323 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.357333 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.799680 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" event={"ID":"fe9da418-f306-4918-a4f5-9c8b0b9ebfe4","Type":"ContainerDied","Data":"76b31f818a3e3c23e0366b18ef58725c594421fba05bebe2bfe21d1c2768637c"} Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.799910 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76b31f818a3e3c23e0366b18ef58725c594421fba05bebe2bfe21d1c2768637c" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.799739 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.866972 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x"] Dec 13 07:11:15 crc kubenswrapper[4644]: E1213 07:11:15.867374 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe9da418-f306-4918-a4f5-9c8b0b9ebfe4" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.867396 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe9da418-f306-4918-a4f5-9c8b0b9ebfe4" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.867605 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe9da418-f306-4918-a4f5-9c8b0b9ebfe4" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.868249 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.871995 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.872233 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.872374 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.872527 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.876897 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63da52dc-29de-43e7-b967-a687cff1c918-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x\" (UID: \"63da52dc-29de-43e7-b967-a687cff1c918\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.877014 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tm2m\" (UniqueName: \"kubernetes.io/projected/63da52dc-29de-43e7-b967-a687cff1c918-kube-api-access-9tm2m\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x\" (UID: \"63da52dc-29de-43e7-b967-a687cff1c918\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.877180 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63da52dc-29de-43e7-b967-a687cff1c918-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x\" (UID: \"63da52dc-29de-43e7-b967-a687cff1c918\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.880141 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x"] Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.980236 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63da52dc-29de-43e7-b967-a687cff1c918-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x\" (UID: \"63da52dc-29de-43e7-b967-a687cff1c918\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.980433 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63da52dc-29de-43e7-b967-a687cff1c918-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x\" (UID: \"63da52dc-29de-43e7-b967-a687cff1c918\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.980596 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tm2m\" (UniqueName: \"kubernetes.io/projected/63da52dc-29de-43e7-b967-a687cff1c918-kube-api-access-9tm2m\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x\" 
(UID: \"63da52dc-29de-43e7-b967-a687cff1c918\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.984353 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63da52dc-29de-43e7-b967-a687cff1c918-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x\" (UID: \"63da52dc-29de-43e7-b967-a687cff1c918\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.984918 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63da52dc-29de-43e7-b967-a687cff1c918-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x\" (UID: \"63da52dc-29de-43e7-b967-a687cff1c918\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" Dec 13 07:11:15 crc kubenswrapper[4644]: I1213 07:11:15.996293 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tm2m\" (UniqueName: \"kubernetes.io/projected/63da52dc-29de-43e7-b967-a687cff1c918-kube-api-access-9tm2m\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x\" (UID: \"63da52dc-29de-43e7-b967-a687cff1c918\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" Dec 13 07:11:16 crc kubenswrapper[4644]: I1213 07:11:16.187539 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" Dec 13 07:11:16 crc kubenswrapper[4644]: I1213 07:11:16.682382 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x"] Dec 13 07:11:16 crc kubenswrapper[4644]: I1213 07:11:16.808283 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" event={"ID":"63da52dc-29de-43e7-b967-a687cff1c918","Type":"ContainerStarted","Data":"e583027277fba9ac4746ab0bc6cb98e47b6e72102ec09aaa412864dcc3a818fa"} Dec 13 07:11:17 crc kubenswrapper[4644]: I1213 07:11:17.815828 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" event={"ID":"63da52dc-29de-43e7-b967-a687cff1c918","Type":"ContainerStarted","Data":"aeed9862032d73f7ea70ae609aa0ecb62c8f9b4e2dcf1c8e8f7e7b88d3e8aaed"} Dec 13 07:11:17 crc kubenswrapper[4644]: I1213 07:11:17.832083 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" podStartSLOduration=2.237801038 podStartE2EDuration="2.832060918s" podCreationTimestamp="2025-12-13 07:11:15 +0000 UTC" firstStartedPulling="2025-12-13 07:11:16.683398762 +0000 UTC m=+1538.898349595" lastFinishedPulling="2025-12-13 07:11:17.277658642 +0000 UTC m=+1539.492609475" observedRunningTime="2025-12-13 07:11:17.828336205 +0000 UTC m=+1540.043287038" watchObservedRunningTime="2025-12-13 07:11:17.832060918 +0000 UTC m=+1540.047011750" Dec 13 07:11:19 crc kubenswrapper[4644]: I1213 07:11:19.039371 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-lc7dc"] Dec 13 07:11:19 crc kubenswrapper[4644]: I1213 07:11:19.048514 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-lc7dc"] Dec 13 07:11:20 crc kubenswrapper[4644]: I1213 07:11:20.398021 4644 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="b1204c9c-4441-44c3-8c02-cbf9c3c74a88" path="/var/lib/kubelet/pods/b1204c9c-4441-44c3-8c02-cbf9c3c74a88/volumes" Dec 13 07:11:32 crc kubenswrapper[4644]: I1213 07:11:32.034994 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-rf9cs"] Dec 13 07:11:32 crc kubenswrapper[4644]: I1213 07:11:32.044169 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-1e41-account-create-update-4wmw8"] Dec 13 07:11:32 crc kubenswrapper[4644]: I1213 07:11:32.051931 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-w4gfg"] Dec 13 07:11:32 crc kubenswrapper[4644]: I1213 07:11:32.059353 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-2dff-account-create-update-rlqft"] Dec 13 07:11:32 crc kubenswrapper[4644]: I1213 07:11:32.065878 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-rf9cs"] Dec 13 07:11:32 crc kubenswrapper[4644]: I1213 07:11:32.071801 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-2dff-account-create-update-rlqft"] Dec 13 07:11:32 crc kubenswrapper[4644]: I1213 07:11:32.076977 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-w4gfg"] Dec 13 07:11:32 crc kubenswrapper[4644]: I1213 07:11:32.081886 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-1e41-account-create-update-4wmw8"] Dec 13 07:11:32 crc kubenswrapper[4644]: I1213 07:11:32.412702 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1db0e56b-985b-427f-9afa-d525e9de505d" path="/var/lib/kubelet/pods/1db0e56b-985b-427f-9afa-d525e9de505d/volumes" Dec 13 07:11:32 crc kubenswrapper[4644]: I1213 07:11:32.413699 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ba20404-0531-48f1-894f-5903b3ff71ef" path="/var/lib/kubelet/pods/3ba20404-0531-48f1-894f-5903b3ff71ef/volumes" Dec 13 07:11:32 crc kubenswrapper[4644]: I1213 07:11:32.414666 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="486a00a0-302e-4867-b7e4-9d935d9278ac" path="/var/lib/kubelet/pods/486a00a0-302e-4867-b7e4-9d935d9278ac/volumes" Dec 13 07:11:32 crc kubenswrapper[4644]: I1213 07:11:32.415417 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="badba5c3-308b-457e-a988-f7f49a15e48a" path="/var/lib/kubelet/pods/badba5c3-308b-457e-a988-f7f49a15e48a/volumes" Dec 13 07:11:33 crc kubenswrapper[4644]: I1213 07:11:33.028313 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-cec9-account-create-update-nt5x6"] Dec 13 07:11:33 crc kubenswrapper[4644]: I1213 07:11:33.034736 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-cec9-account-create-update-nt5x6"] Dec 13 07:11:33 crc kubenswrapper[4644]: I1213 07:11:33.040786 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-twbxq"] Dec 13 07:11:33 crc kubenswrapper[4644]: I1213 07:11:33.046491 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-twbxq"] Dec 13 07:11:34 crc kubenswrapper[4644]: I1213 07:11:34.399742 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="423735a8-e97e-4ce8-aecf-287e8fe08713" path="/var/lib/kubelet/pods/423735a8-e97e-4ce8-aecf-287e8fe08713/volumes" Dec 13 07:11:34 crc kubenswrapper[4644]: I1213 07:11:34.401003 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="c3cda71d-8d2a-4987-b267-e5cfcd8dd753" path="/var/lib/kubelet/pods/c3cda71d-8d2a-4987-b267-e5cfcd8dd753/volumes" Dec 13 07:11:36 crc kubenswrapper[4644]: I1213 07:11:36.047421 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-brm8s"] Dec 13 07:11:36 crc kubenswrapper[4644]: I1213 07:11:36.055979 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-brm8s"] Dec 13 07:11:36 crc kubenswrapper[4644]: I1213 07:11:36.398913 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a92835b2-c7ad-4de9-bee6-ab7acbe20fb8" path="/var/lib/kubelet/pods/a92835b2-c7ad-4de9-bee6-ab7acbe20fb8/volumes" Dec 13 07:11:39 crc kubenswrapper[4644]: I1213 07:11:39.753844 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:11:39 crc kubenswrapper[4644]: I1213 07:11:39.754123 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:11:39 crc kubenswrapper[4644]: I1213 07:11:39.754174 4644 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 07:11:39 crc kubenswrapper[4644]: I1213 07:11:39.755075 4644 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130"} pod="openshift-machine-config-operator/machine-config-daemon-45tj4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 07:11:39 crc kubenswrapper[4644]: I1213 07:11:39.755135 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" containerID="cri-o://5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" gracePeriod=600 Dec 13 07:11:39 crc kubenswrapper[4644]: E1213 07:11:39.872871 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:11:39 crc kubenswrapper[4644]: I1213 07:11:39.987136 4644 generic.go:334] "Generic (PLEG): container finished" podID="48240f19-087e-4597-b448-ab1a190a5027" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" exitCode=0 Dec 13 07:11:39 crc kubenswrapper[4644]: I1213 07:11:39.987178 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerDied","Data":"5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130"} Dec 
13 07:11:39 crc kubenswrapper[4644]: I1213 07:11:39.987213 4644 scope.go:117] "RemoveContainer" containerID="c27c345ea5c1252e6754fce190bc5091a29be12a2dec59380a2edf28ec34cb52" Dec 13 07:11:39 crc kubenswrapper[4644]: I1213 07:11:39.988056 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:11:39 crc kubenswrapper[4644]: E1213 07:11:39.988607 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:11:41 crc kubenswrapper[4644]: I1213 07:11:41.369627 4644 scope.go:117] "RemoveContainer" containerID="d60f19519e2d7f793086d5d5a5e433ea0346765c7e4bfd2b3c84b962126f6e80" Dec 13 07:11:41 crc kubenswrapper[4644]: I1213 07:11:41.390747 4644 scope.go:117] "RemoveContainer" containerID="a99b83b050cf98c6bdba04caf580b9eeb507e999adf66935482e96eb3f007008" Dec 13 07:11:41 crc kubenswrapper[4644]: I1213 07:11:41.421357 4644 scope.go:117] "RemoveContainer" containerID="d4b12782f57689df3cd61e9735567e6191173273e999caedaaac3513c6749256" Dec 13 07:11:41 crc kubenswrapper[4644]: I1213 07:11:41.462795 4644 scope.go:117] "RemoveContainer" containerID="3a45c840e9e2118188e237411a5b1b39a859ff5e7f166799a56aea923b964910" Dec 13 07:11:41 crc kubenswrapper[4644]: I1213 07:11:41.497167 4644 scope.go:117] "RemoveContainer" containerID="2e17f64ff924b1cd080c58ecb9b5c4d9c839e0fe4b253ca92b041ed4d718f21c" Dec 13 07:11:41 crc kubenswrapper[4644]: I1213 07:11:41.536496 4644 scope.go:117] "RemoveContainer" containerID="636f26a89a93e532cd2f574d0decf187977b0fad55f75b61415d39103173495f" Dec 13 07:11:41 crc kubenswrapper[4644]: I1213 07:11:41.569542 4644 scope.go:117] "RemoveContainer" containerID="eed90f160c8516ddb15f1542358bc940498ed2839865871c1607d14d97007cce" Dec 13 07:11:41 crc kubenswrapper[4644]: I1213 07:11:41.587316 4644 scope.go:117] "RemoveContainer" containerID="07ce829422994523568085b330c2f843d15342910ba58e5f830d189a884e623b" Dec 13 07:11:41 crc kubenswrapper[4644]: I1213 07:11:41.604978 4644 scope.go:117] "RemoveContainer" containerID="22f93573de6ca094d257b6b1bb3762bc574821fb9d5c740d032ced1b46b72942" Dec 13 07:11:41 crc kubenswrapper[4644]: I1213 07:11:41.637501 4644 scope.go:117] "RemoveContainer" containerID="c071450de3096cac70191b42fbf695c4db29ca7f41b8599b26297ddf026ba287" Dec 13 07:11:41 crc kubenswrapper[4644]: I1213 07:11:41.659496 4644 scope.go:117] "RemoveContainer" containerID="ecba3c6050340d8a7cca5e5586e13bcd67e498738dfbc0e2ddd1b1c804a60713" Dec 13 07:11:41 crc kubenswrapper[4644]: I1213 07:11:41.679705 4644 scope.go:117] "RemoveContainer" containerID="e89f03a1aadee20bd362b68848db4133a0c0e31d8b7676e914264157d2ff912f" Dec 13 07:11:41 crc kubenswrapper[4644]: I1213 07:11:41.702209 4644 scope.go:117] "RemoveContainer" containerID="fe63ee29370897c239004e85ce61a0e905f75c1a55221eaa658c0831ff790b73" Dec 13 07:11:41 crc kubenswrapper[4644]: I1213 07:11:41.722649 4644 scope.go:117] "RemoveContainer" containerID="185ea717dfc5086ba409c443e800ee4e9b3f3dce4f17bc951d6d54a42b9feacc" Dec 13 07:11:54 crc kubenswrapper[4644]: I1213 07:11:54.094359 4644 generic.go:334] "Generic (PLEG): container finished" podID="63da52dc-29de-43e7-b967-a687cff1c918" 
containerID="aeed9862032d73f7ea70ae609aa0ecb62c8f9b4e2dcf1c8e8f7e7b88d3e8aaed" exitCode=0 Dec 13 07:11:54 crc kubenswrapper[4644]: I1213 07:11:54.094455 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" event={"ID":"63da52dc-29de-43e7-b967-a687cff1c918","Type":"ContainerDied","Data":"aeed9862032d73f7ea70ae609aa0ecb62c8f9b4e2dcf1c8e8f7e7b88d3e8aaed"} Dec 13 07:11:54 crc kubenswrapper[4644]: I1213 07:11:54.389587 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:11:54 crc kubenswrapper[4644]: E1213 07:11:54.389848 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:11:55 crc kubenswrapper[4644]: I1213 07:11:55.448075 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" Dec 13 07:11:55 crc kubenswrapper[4644]: I1213 07:11:55.602399 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tm2m\" (UniqueName: \"kubernetes.io/projected/63da52dc-29de-43e7-b967-a687cff1c918-kube-api-access-9tm2m\") pod \"63da52dc-29de-43e7-b967-a687cff1c918\" (UID: \"63da52dc-29de-43e7-b967-a687cff1c918\") " Dec 13 07:11:55 crc kubenswrapper[4644]: I1213 07:11:55.602508 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63da52dc-29de-43e7-b967-a687cff1c918-ssh-key\") pod \"63da52dc-29de-43e7-b967-a687cff1c918\" (UID: \"63da52dc-29de-43e7-b967-a687cff1c918\") " Dec 13 07:11:55 crc kubenswrapper[4644]: I1213 07:11:55.602605 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63da52dc-29de-43e7-b967-a687cff1c918-inventory\") pod \"63da52dc-29de-43e7-b967-a687cff1c918\" (UID: \"63da52dc-29de-43e7-b967-a687cff1c918\") " Dec 13 07:11:55 crc kubenswrapper[4644]: I1213 07:11:55.618302 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63da52dc-29de-43e7-b967-a687cff1c918-kube-api-access-9tm2m" (OuterVolumeSpecName: "kube-api-access-9tm2m") pod "63da52dc-29de-43e7-b967-a687cff1c918" (UID: "63da52dc-29de-43e7-b967-a687cff1c918"). InnerVolumeSpecName "kube-api-access-9tm2m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:11:55 crc kubenswrapper[4644]: I1213 07:11:55.631600 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63da52dc-29de-43e7-b967-a687cff1c918-inventory" (OuterVolumeSpecName: "inventory") pod "63da52dc-29de-43e7-b967-a687cff1c918" (UID: "63da52dc-29de-43e7-b967-a687cff1c918"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:11:55 crc kubenswrapper[4644]: I1213 07:11:55.631965 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63da52dc-29de-43e7-b967-a687cff1c918-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "63da52dc-29de-43e7-b967-a687cff1c918" (UID: "63da52dc-29de-43e7-b967-a687cff1c918"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:11:55 crc kubenswrapper[4644]: I1213 07:11:55.705542 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tm2m\" (UniqueName: \"kubernetes.io/projected/63da52dc-29de-43e7-b967-a687cff1c918-kube-api-access-9tm2m\") on node \"crc\" DevicePath \"\"" Dec 13 07:11:55 crc kubenswrapper[4644]: I1213 07:11:55.705577 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63da52dc-29de-43e7-b967-a687cff1c918-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:11:55 crc kubenswrapper[4644]: I1213 07:11:55.705587 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63da52dc-29de-43e7-b967-a687cff1c918-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.120056 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" event={"ID":"63da52dc-29de-43e7-b967-a687cff1c918","Type":"ContainerDied","Data":"e583027277fba9ac4746ab0bc6cb98e47b6e72102ec09aaa412864dcc3a818fa"} Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.120115 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e583027277fba9ac4746ab0bc6cb98e47b6e72102ec09aaa412864dcc3a818fa" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.120124 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.176421 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-6g9cs"] Dec 13 07:11:56 crc kubenswrapper[4644]: E1213 07:11:56.176901 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63da52dc-29de-43e7-b967-a687cff1c918" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.176925 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="63da52dc-29de-43e7-b967-a687cff1c918" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.177150 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="63da52dc-29de-43e7-b967-a687cff1c918" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.177848 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.180173 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.180261 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.180627 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.180725 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.189919 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-6g9cs"] Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.213885 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-6g9cs\" (UID: \"7d41e3c7-46ae-442b-a11c-4ddedb2c6398\") " pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.214293 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-6g9cs\" (UID: \"7d41e3c7-46ae-442b-a11c-4ddedb2c6398\") " pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.214537 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mff9v\" (UniqueName: \"kubernetes.io/projected/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-kube-api-access-mff9v\") pod \"ssh-known-hosts-edpm-deployment-6g9cs\" (UID: \"7d41e3c7-46ae-442b-a11c-4ddedb2c6398\") " pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.315982 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-6g9cs\" (UID: \"7d41e3c7-46ae-442b-a11c-4ddedb2c6398\") " pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.316169 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mff9v\" (UniqueName: \"kubernetes.io/projected/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-kube-api-access-mff9v\") pod \"ssh-known-hosts-edpm-deployment-6g9cs\" (UID: \"7d41e3c7-46ae-442b-a11c-4ddedb2c6398\") " pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.316291 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-6g9cs\" (UID: \"7d41e3c7-46ae-442b-a11c-4ddedb2c6398\") " pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" Dec 13 07:11:56 crc 
kubenswrapper[4644]: I1213 07:11:56.321757 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-6g9cs\" (UID: \"7d41e3c7-46ae-442b-a11c-4ddedb2c6398\") " pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.321803 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-6g9cs\" (UID: \"7d41e3c7-46ae-442b-a11c-4ddedb2c6398\") " pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.333734 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mff9v\" (UniqueName: \"kubernetes.io/projected/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-kube-api-access-mff9v\") pod \"ssh-known-hosts-edpm-deployment-6g9cs\" (UID: \"7d41e3c7-46ae-442b-a11c-4ddedb2c6398\") " pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.495842 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" Dec 13 07:11:56 crc kubenswrapper[4644]: I1213 07:11:56.937817 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-6g9cs"] Dec 13 07:11:57 crc kubenswrapper[4644]: I1213 07:11:57.129082 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" event={"ID":"7d41e3c7-46ae-442b-a11c-4ddedb2c6398","Type":"ContainerStarted","Data":"8e91e7a226bc589a06dc262de0c5a3a668a3a539cb0a60894cc0b1d07f49a69d"} Dec 13 07:11:58 crc kubenswrapper[4644]: I1213 07:11:58.136546 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" event={"ID":"7d41e3c7-46ae-442b-a11c-4ddedb2c6398","Type":"ContainerStarted","Data":"5071c8bac49bc486d035a7c60dabb88d797ccf06913a6e4c3b95479214e8ecd1"} Dec 13 07:11:58 crc kubenswrapper[4644]: I1213 07:11:58.149952 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" podStartSLOduration=1.4993168909999999 podStartE2EDuration="2.149933986s" podCreationTimestamp="2025-12-13 07:11:56 +0000 UTC" firstStartedPulling="2025-12-13 07:11:56.955843433 +0000 UTC m=+1579.170794265" lastFinishedPulling="2025-12-13 07:11:57.606460527 +0000 UTC m=+1579.821411360" observedRunningTime="2025-12-13 07:11:58.148356169 +0000 UTC m=+1580.363307002" watchObservedRunningTime="2025-12-13 07:11:58.149933986 +0000 UTC m=+1580.364884818" Dec 13 07:12:00 crc kubenswrapper[4644]: I1213 07:12:00.037607 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-542cr"] Dec 13 07:12:00 crc kubenswrapper[4644]: I1213 07:12:00.046715 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-542cr"] Dec 13 07:12:00 crc kubenswrapper[4644]: I1213 07:12:00.398284 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae5ec083-82fc-4a1b-826f-50536ee5fcd0" path="/var/lib/kubelet/pods/ae5ec083-82fc-4a1b-826f-50536ee5fcd0/volumes" Dec 13 07:12:03 crc kubenswrapper[4644]: I1213 07:12:03.174642 4644 generic.go:334] "Generic (PLEG): container finished" 
podID="7d41e3c7-46ae-442b-a11c-4ddedb2c6398" containerID="5071c8bac49bc486d035a7c60dabb88d797ccf06913a6e4c3b95479214e8ecd1" exitCode=0 Dec 13 07:12:03 crc kubenswrapper[4644]: I1213 07:12:03.174726 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" event={"ID":"7d41e3c7-46ae-442b-a11c-4ddedb2c6398","Type":"ContainerDied","Data":"5071c8bac49bc486d035a7c60dabb88d797ccf06913a6e4c3b95479214e8ecd1"} Dec 13 07:12:04 crc kubenswrapper[4644]: I1213 07:12:04.508617 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" Dec 13 07:12:04 crc kubenswrapper[4644]: I1213 07:12:04.578742 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-ssh-key-openstack-edpm-ipam\") pod \"7d41e3c7-46ae-442b-a11c-4ddedb2c6398\" (UID: \"7d41e3c7-46ae-442b-a11c-4ddedb2c6398\") " Dec 13 07:12:04 crc kubenswrapper[4644]: I1213 07:12:04.578816 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-inventory-0\") pod \"7d41e3c7-46ae-442b-a11c-4ddedb2c6398\" (UID: \"7d41e3c7-46ae-442b-a11c-4ddedb2c6398\") " Dec 13 07:12:04 crc kubenswrapper[4644]: I1213 07:12:04.578913 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mff9v\" (UniqueName: \"kubernetes.io/projected/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-kube-api-access-mff9v\") pod \"7d41e3c7-46ae-442b-a11c-4ddedb2c6398\" (UID: \"7d41e3c7-46ae-442b-a11c-4ddedb2c6398\") " Dec 13 07:12:04 crc kubenswrapper[4644]: I1213 07:12:04.582973 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-kube-api-access-mff9v" (OuterVolumeSpecName: "kube-api-access-mff9v") pod "7d41e3c7-46ae-442b-a11c-4ddedb2c6398" (UID: "7d41e3c7-46ae-442b-a11c-4ddedb2c6398"). InnerVolumeSpecName "kube-api-access-mff9v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:12:04 crc kubenswrapper[4644]: I1213 07:12:04.598770 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "7d41e3c7-46ae-442b-a11c-4ddedb2c6398" (UID: "7d41e3c7-46ae-442b-a11c-4ddedb2c6398"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:12:04 crc kubenswrapper[4644]: I1213 07:12:04.600179 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "7d41e3c7-46ae-442b-a11c-4ddedb2c6398" (UID: "7d41e3c7-46ae-442b-a11c-4ddedb2c6398"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:12:04 crc kubenswrapper[4644]: I1213 07:12:04.681371 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 13 07:12:04 crc kubenswrapper[4644]: I1213 07:12:04.681404 4644 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 13 07:12:04 crc kubenswrapper[4644]: I1213 07:12:04.681416 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mff9v\" (UniqueName: \"kubernetes.io/projected/7d41e3c7-46ae-442b-a11c-4ddedb2c6398-kube-api-access-mff9v\") on node \"crc\" DevicePath \"\"" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.189713 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" event={"ID":"7d41e3c7-46ae-442b-a11c-4ddedb2c6398","Type":"ContainerDied","Data":"8e91e7a226bc589a06dc262de0c5a3a668a3a539cb0a60894cc0b1d07f49a69d"} Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.189756 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e91e7a226bc589a06dc262de0c5a3a668a3a539cb0a60894cc0b1d07f49a69d" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.189811 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-6g9cs" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.241250 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l"] Dec 13 07:12:05 crc kubenswrapper[4644]: E1213 07:12:05.241649 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d41e3c7-46ae-442b-a11c-4ddedb2c6398" containerName="ssh-known-hosts-edpm-deployment" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.241668 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d41e3c7-46ae-442b-a11c-4ddedb2c6398" containerName="ssh-known-hosts-edpm-deployment" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.241827 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d41e3c7-46ae-442b-a11c-4ddedb2c6398" containerName="ssh-known-hosts-edpm-deployment" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.242402 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.245833 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.246541 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.247117 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.247515 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.257687 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l"] Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.293263 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpg67\" (UniqueName: \"kubernetes.io/projected/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-kube-api-access-wpg67\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6zn7l\" (UID: \"8bf97ceb-19ea-4495-9d8a-5dc150074bd5\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.293417 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6zn7l\" (UID: \"8bf97ceb-19ea-4495-9d8a-5dc150074bd5\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.293527 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6zn7l\" (UID: \"8bf97ceb-19ea-4495-9d8a-5dc150074bd5\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.389749 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:12:05 crc kubenswrapper[4644]: E1213 07:12:05.390469 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.394947 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpg67\" (UniqueName: \"kubernetes.io/projected/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-kube-api-access-wpg67\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6zn7l\" (UID: \"8bf97ceb-19ea-4495-9d8a-5dc150074bd5\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.395078 4644 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6zn7l\" (UID: \"8bf97ceb-19ea-4495-9d8a-5dc150074bd5\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.395160 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6zn7l\" (UID: \"8bf97ceb-19ea-4495-9d8a-5dc150074bd5\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.399839 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6zn7l\" (UID: \"8bf97ceb-19ea-4495-9d8a-5dc150074bd5\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.399899 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6zn7l\" (UID: \"8bf97ceb-19ea-4495-9d8a-5dc150074bd5\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.410919 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpg67\" (UniqueName: \"kubernetes.io/projected/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-kube-api-access-wpg67\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6zn7l\" (UID: \"8bf97ceb-19ea-4495-9d8a-5dc150074bd5\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.557709 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" Dec 13 07:12:05 crc kubenswrapper[4644]: I1213 07:12:05.996737 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l"] Dec 13 07:12:06 crc kubenswrapper[4644]: I1213 07:12:06.026800 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-bk2fx"] Dec 13 07:12:06 crc kubenswrapper[4644]: I1213 07:12:06.035197 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-bk2fx"] Dec 13 07:12:06 crc kubenswrapper[4644]: I1213 07:12:06.197577 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" event={"ID":"8bf97ceb-19ea-4495-9d8a-5dc150074bd5","Type":"ContainerStarted","Data":"9684f6c9cd40c4194d1809eda95e5b896ccf20e60c4e3bb15b146dd9506d7dea"} Dec 13 07:12:06 crc kubenswrapper[4644]: I1213 07:12:06.398745 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d3803d2-f0df-48c8-ba36-8a1cffdc262e" path="/var/lib/kubelet/pods/2d3803d2-f0df-48c8-ba36-8a1cffdc262e/volumes" Dec 13 07:12:07 crc kubenswrapper[4644]: I1213 07:12:07.206596 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" event={"ID":"8bf97ceb-19ea-4495-9d8a-5dc150074bd5","Type":"ContainerStarted","Data":"7acf449744166db33db46a3f468f94fc258d92029d295bfac1d828aa727716a6"} Dec 13 07:12:07 crc kubenswrapper[4644]: I1213 07:12:07.229600 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" podStartSLOduration=1.664327381 podStartE2EDuration="2.229578161s" podCreationTimestamp="2025-12-13 07:12:05 +0000 UTC" firstStartedPulling="2025-12-13 07:12:06.002082676 +0000 UTC m=+1588.217033509" lastFinishedPulling="2025-12-13 07:12:06.567333457 +0000 UTC m=+1588.782284289" observedRunningTime="2025-12-13 07:12:07.221524781 +0000 UTC m=+1589.436475614" watchObservedRunningTime="2025-12-13 07:12:07.229578161 +0000 UTC m=+1589.444528994" Dec 13 07:12:08 crc kubenswrapper[4644]: I1213 07:12:08.026948 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-gp49b"] Dec 13 07:12:08 crc kubenswrapper[4644]: I1213 07:12:08.038685 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-gp49b"] Dec 13 07:12:08 crc kubenswrapper[4644]: I1213 07:12:08.398005 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02c6c85f-c08e-4317-b3fc-35689c19bade" path="/var/lib/kubelet/pods/02c6c85f-c08e-4317-b3fc-35689c19bade/volumes" Dec 13 07:12:13 crc kubenswrapper[4644]: I1213 07:12:13.251936 4644 generic.go:334] "Generic (PLEG): container finished" podID="8bf97ceb-19ea-4495-9d8a-5dc150074bd5" containerID="7acf449744166db33db46a3f468f94fc258d92029d295bfac1d828aa727716a6" exitCode=0 Dec 13 07:12:13 crc kubenswrapper[4644]: I1213 07:12:13.252017 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" event={"ID":"8bf97ceb-19ea-4495-9d8a-5dc150074bd5","Type":"ContainerDied","Data":"7acf449744166db33db46a3f468f94fc258d92029d295bfac1d828aa727716a6"} Dec 13 07:12:14 crc kubenswrapper[4644]: I1213 07:12:14.573652 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" Dec 13 07:12:14 crc kubenswrapper[4644]: I1213 07:12:14.681852 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-ssh-key\") pod \"8bf97ceb-19ea-4495-9d8a-5dc150074bd5\" (UID: \"8bf97ceb-19ea-4495-9d8a-5dc150074bd5\") " Dec 13 07:12:14 crc kubenswrapper[4644]: I1213 07:12:14.682090 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-inventory\") pod \"8bf97ceb-19ea-4495-9d8a-5dc150074bd5\" (UID: \"8bf97ceb-19ea-4495-9d8a-5dc150074bd5\") " Dec 13 07:12:14 crc kubenswrapper[4644]: I1213 07:12:14.682236 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpg67\" (UniqueName: \"kubernetes.io/projected/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-kube-api-access-wpg67\") pod \"8bf97ceb-19ea-4495-9d8a-5dc150074bd5\" (UID: \"8bf97ceb-19ea-4495-9d8a-5dc150074bd5\") " Dec 13 07:12:14 crc kubenswrapper[4644]: I1213 07:12:14.687583 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-kube-api-access-wpg67" (OuterVolumeSpecName: "kube-api-access-wpg67") pod "8bf97ceb-19ea-4495-9d8a-5dc150074bd5" (UID: "8bf97ceb-19ea-4495-9d8a-5dc150074bd5"). InnerVolumeSpecName "kube-api-access-wpg67". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:12:14 crc kubenswrapper[4644]: I1213 07:12:14.704283 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-inventory" (OuterVolumeSpecName: "inventory") pod "8bf97ceb-19ea-4495-9d8a-5dc150074bd5" (UID: "8bf97ceb-19ea-4495-9d8a-5dc150074bd5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:12:14 crc kubenswrapper[4644]: I1213 07:12:14.704644 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8bf97ceb-19ea-4495-9d8a-5dc150074bd5" (UID: "8bf97ceb-19ea-4495-9d8a-5dc150074bd5"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:12:14 crc kubenswrapper[4644]: I1213 07:12:14.785177 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:12:14 crc kubenswrapper[4644]: I1213 07:12:14.785212 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpg67\" (UniqueName: \"kubernetes.io/projected/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-kube-api-access-wpg67\") on node \"crc\" DevicePath \"\"" Dec 13 07:12:14 crc kubenswrapper[4644]: I1213 07:12:14.785223 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8bf97ceb-19ea-4495-9d8a-5dc150074bd5-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.268391 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" event={"ID":"8bf97ceb-19ea-4495-9d8a-5dc150074bd5","Type":"ContainerDied","Data":"9684f6c9cd40c4194d1809eda95e5b896ccf20e60c4e3bb15b146dd9506d7dea"} Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.268455 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9684f6c9cd40c4194d1809eda95e5b896ccf20e60c4e3bb15b146dd9506d7dea" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.268509 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.327524 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2"] Dec 13 07:12:15 crc kubenswrapper[4644]: E1213 07:12:15.327936 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf97ceb-19ea-4495-9d8a-5dc150074bd5" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.327955 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf97ceb-19ea-4495-9d8a-5dc150074bd5" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.328114 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf97ceb-19ea-4495-9d8a-5dc150074bd5" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.328759 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.330663 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.330887 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.331183 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.332373 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.337606 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2"] Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.396815 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d241dfce-7e9d-4f7b-9869-aa23e3d3569f-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2\" (UID: \"d241dfce-7e9d-4f7b-9869-aa23e3d3569f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.397163 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rnsz\" (UniqueName: \"kubernetes.io/projected/d241dfce-7e9d-4f7b-9869-aa23e3d3569f-kube-api-access-9rnsz\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2\" (UID: \"d241dfce-7e9d-4f7b-9869-aa23e3d3569f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.397227 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d241dfce-7e9d-4f7b-9869-aa23e3d3569f-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2\" (UID: \"d241dfce-7e9d-4f7b-9869-aa23e3d3569f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.499419 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d241dfce-7e9d-4f7b-9869-aa23e3d3569f-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2\" (UID: \"d241dfce-7e9d-4f7b-9869-aa23e3d3569f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.499526 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rnsz\" (UniqueName: \"kubernetes.io/projected/d241dfce-7e9d-4f7b-9869-aa23e3d3569f-kube-api-access-9rnsz\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2\" (UID: \"d241dfce-7e9d-4f7b-9869-aa23e3d3569f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.499582 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d241dfce-7e9d-4f7b-9869-aa23e3d3569f-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2\" (UID: 
\"d241dfce-7e9d-4f7b-9869-aa23e3d3569f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.504965 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d241dfce-7e9d-4f7b-9869-aa23e3d3569f-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2\" (UID: \"d241dfce-7e9d-4f7b-9869-aa23e3d3569f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.505261 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d241dfce-7e9d-4f7b-9869-aa23e3d3569f-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2\" (UID: \"d241dfce-7e9d-4f7b-9869-aa23e3d3569f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.514722 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rnsz\" (UniqueName: \"kubernetes.io/projected/d241dfce-7e9d-4f7b-9869-aa23e3d3569f-kube-api-access-9rnsz\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2\" (UID: \"d241dfce-7e9d-4f7b-9869-aa23e3d3569f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2" Dec 13 07:12:15 crc kubenswrapper[4644]: I1213 07:12:15.642040 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2" Dec 13 07:12:16 crc kubenswrapper[4644]: I1213 07:12:16.096731 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2"] Dec 13 07:12:16 crc kubenswrapper[4644]: I1213 07:12:16.278477 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2" event={"ID":"d241dfce-7e9d-4f7b-9869-aa23e3d3569f","Type":"ContainerStarted","Data":"81a928b0ba54f7a94c4e79532da116a368d33256fa475541488b94eeebf6409f"} Dec 13 07:12:17 crc kubenswrapper[4644]: I1213 07:12:17.286996 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2" event={"ID":"d241dfce-7e9d-4f7b-9869-aa23e3d3569f","Type":"ContainerStarted","Data":"8a09bfe52832d6e83a1e04ebc7bab3b5b1eb564faadd8a242c8ba8c990ec05b8"} Dec 13 07:12:17 crc kubenswrapper[4644]: I1213 07:12:17.305519 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2" podStartSLOduration=1.5424677390000001 podStartE2EDuration="2.305497706s" podCreationTimestamp="2025-12-13 07:12:15 +0000 UTC" firstStartedPulling="2025-12-13 07:12:16.101886324 +0000 UTC m=+1598.316837157" lastFinishedPulling="2025-12-13 07:12:16.864916291 +0000 UTC m=+1599.079867124" observedRunningTime="2025-12-13 07:12:17.299787992 +0000 UTC m=+1599.514738824" watchObservedRunningTime="2025-12-13 07:12:17.305497706 +0000 UTC m=+1599.520448559" Dec 13 07:12:17 crc kubenswrapper[4644]: I1213 07:12:17.390093 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:12:17 crc kubenswrapper[4644]: E1213 07:12:17.390495 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Dec 13 07:12:17 crc kubenswrapper[4644]: E1213 07:12:17.390495 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:12:19 crc kubenswrapper[4644]: I1213 07:12:19.033115 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-fdgxf"]
Dec 13 07:12:19 crc kubenswrapper[4644]: I1213 07:12:19.042729 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-fdgxf"]
Dec 13 07:12:20 crc kubenswrapper[4644]: I1213 07:12:20.398321 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf873768-3def-4a7c-b48b-fb3749f8c927" path="/var/lib/kubelet/pods/bf873768-3def-4a7c-b48b-fb3749f8c927/volumes"
Dec 13 07:12:22 crc kubenswrapper[4644]: I1213 07:12:22.026959 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-dwfrr"]
Dec 13 07:12:22 crc kubenswrapper[4644]: I1213 07:12:22.033140 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-dwfrr"]
Dec 13 07:12:22 crc kubenswrapper[4644]: I1213 07:12:22.399243 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f264a4d-92e3-4a69-a07f-00a3f0802484" path="/var/lib/kubelet/pods/2f264a4d-92e3-4a69-a07f-00a3f0802484/volumes"
Dec 13 07:12:24 crc kubenswrapper[4644]: I1213 07:12:24.339863 4644 generic.go:334] "Generic (PLEG): container finished" podID="d241dfce-7e9d-4f7b-9869-aa23e3d3569f" containerID="8a09bfe52832d6e83a1e04ebc7bab3b5b1eb564faadd8a242c8ba8c990ec05b8" exitCode=0
Dec 13 07:12:24 crc kubenswrapper[4644]: I1213 07:12:24.339952 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2" event={"ID":"d241dfce-7e9d-4f7b-9869-aa23e3d3569f","Type":"ContainerDied","Data":"8a09bfe52832d6e83a1e04ebc7bab3b5b1eb564faadd8a242c8ba8c990ec05b8"}
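NOTE: "back-off 5m0s" in the recurring pod_workers.go:1301 error means the restart backoff for machine-config-daemon has reached its ceiling: the kubelet roughly doubles the restart delay per failure up to a 5-minute cap, and keeps re-logging this error on every sync until the backoff window expires. A sketch of that doubling-with-cap policy (illustrative; the 10s initial delay is an assumption here, the 5m cap comes from the log):

    package main

    import (
    	"fmt"
    	"time"
    )

    // restartDelay doubles with each failed restart and caps at 5 minutes,
    // the "back-off 5m0s" seen in the log.
    func restartDelay(restarts int) time.Duration {
    	d := 10 * time.Second
    	for i := 0; i < restarts && d < 5*time.Minute; i++ {
    		d *= 2
    	}
    	if d > 5*time.Minute {
    		d = 5 * time.Minute
    	}
    	return d
    }

    func main() {
    	for r := 0; r <= 6; r++ {
    		fmt.Println(r, restartDelay(r)) // 10s 20s 40s 1m20s 2m40s 5m0s 5m0s
    	}
    }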
Dec 13 07:12:25 crc kubenswrapper[4644]: I1213 07:12:25.684332 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2"
Dec 13 07:12:25 crc kubenswrapper[4644]: I1213 07:12:25.801949 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d241dfce-7e9d-4f7b-9869-aa23e3d3569f-ssh-key\") pod \"d241dfce-7e9d-4f7b-9869-aa23e3d3569f\" (UID: \"d241dfce-7e9d-4f7b-9869-aa23e3d3569f\") "
Dec 13 07:12:25 crc kubenswrapper[4644]: I1213 07:12:25.802003 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rnsz\" (UniqueName: \"kubernetes.io/projected/d241dfce-7e9d-4f7b-9869-aa23e3d3569f-kube-api-access-9rnsz\") pod \"d241dfce-7e9d-4f7b-9869-aa23e3d3569f\" (UID: \"d241dfce-7e9d-4f7b-9869-aa23e3d3569f\") "
Dec 13 07:12:25 crc kubenswrapper[4644]: I1213 07:12:25.802165 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d241dfce-7e9d-4f7b-9869-aa23e3d3569f-inventory\") pod \"d241dfce-7e9d-4f7b-9869-aa23e3d3569f\" (UID: \"d241dfce-7e9d-4f7b-9869-aa23e3d3569f\") "
Dec 13 07:12:25 crc kubenswrapper[4644]: I1213 07:12:25.808748 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d241dfce-7e9d-4f7b-9869-aa23e3d3569f-kube-api-access-9rnsz" (OuterVolumeSpecName: "kube-api-access-9rnsz") pod "d241dfce-7e9d-4f7b-9869-aa23e3d3569f" (UID: "d241dfce-7e9d-4f7b-9869-aa23e3d3569f"). InnerVolumeSpecName "kube-api-access-9rnsz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 13 07:12:25 crc kubenswrapper[4644]: I1213 07:12:25.823224 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d241dfce-7e9d-4f7b-9869-aa23e3d3569f-inventory" (OuterVolumeSpecName: "inventory") pod "d241dfce-7e9d-4f7b-9869-aa23e3d3569f" (UID: "d241dfce-7e9d-4f7b-9869-aa23e3d3569f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:12:25 crc kubenswrapper[4644]: I1213 07:12:25.904557 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d241dfce-7e9d-4f7b-9869-aa23e3d3569f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:12:25 crc kubenswrapper[4644]: I1213 07:12:25.904592 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rnsz\" (UniqueName: \"kubernetes.io/projected/d241dfce-7e9d-4f7b-9869-aa23e3d3569f-kube-api-access-9rnsz\") on node \"crc\" DevicePath \"\"" Dec 13 07:12:25 crc kubenswrapper[4644]: I1213 07:12:25.904605 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d241dfce-7e9d-4f7b-9869-aa23e3d3569f-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:12:26 crc kubenswrapper[4644]: I1213 07:12:26.356758 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2" event={"ID":"d241dfce-7e9d-4f7b-9869-aa23e3d3569f","Type":"ContainerDied","Data":"81a928b0ba54f7a94c4e79532da116a368d33256fa475541488b94eeebf6409f"} Dec 13 07:12:26 crc kubenswrapper[4644]: I1213 07:12:26.356799 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81a928b0ba54f7a94c4e79532da116a368d33256fa475541488b94eeebf6409f" Dec 13 07:12:26 crc kubenswrapper[4644]: I1213 07:12:26.356802 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2" Dec 13 07:12:30 crc kubenswrapper[4644]: I1213 07:12:30.390146 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:12:30 crc kubenswrapper[4644]: E1213 07:12:30.390717 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:12:41 crc kubenswrapper[4644]: I1213 07:12:41.390058 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:12:41 crc kubenswrapper[4644]: E1213 07:12:41.391005 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:12:41 crc kubenswrapper[4644]: I1213 07:12:41.934520 4644 scope.go:117] "RemoveContainer" containerID="eb29f4b138a392e8437ec9668608bb64c52366e4b34b2e5b07bbacc1e0f9ca25" Dec 13 07:12:41 crc kubenswrapper[4644]: I1213 07:12:41.969348 4644 scope.go:117] "RemoveContainer" containerID="16bffb83fecd6ae0a5ff90730e795f5ec4470569589b7510f35693b5e0f33b0b" Dec 13 07:12:41 crc kubenswrapper[4644]: I1213 07:12:41.997897 4644 scope.go:117] "RemoveContainer" containerID="83ad15e74ba901753f87023a8a502c26d06b4f90b1a970597075c9488660170e" Dec 13 07:12:42 crc kubenswrapper[4644]: I1213 07:12:42.042469 
Dec 13 07:12:42 crc kubenswrapper[4644]: I1213 07:12:42.042469 4644 scope.go:117] "RemoveContainer" containerID="9207ac8bfea191ebadb1a4cf6b5338286a7fec14c62b9337f1b91d5a4bf571ea"
Dec 13 07:12:42 crc kubenswrapper[4644]: I1213 07:12:42.075392 4644 scope.go:117] "RemoveContainer" containerID="8494c487ab0cf5ce802aa106153b692bb8e7ed1afb1731753dc2d7f6f2dc1c09"
Dec 13 07:12:55 crc kubenswrapper[4644]: I1213 07:12:55.942518 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6z4jl"]
Dec 13 07:12:55 crc kubenswrapper[4644]: E1213 07:12:55.943470 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d241dfce-7e9d-4f7b-9869-aa23e3d3569f" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Dec 13 07:12:55 crc kubenswrapper[4644]: I1213 07:12:55.943489 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="d241dfce-7e9d-4f7b-9869-aa23e3d3569f" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Dec 13 07:12:55 crc kubenswrapper[4644]: I1213 07:12:55.943720 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="d241dfce-7e9d-4f7b-9869-aa23e3d3569f" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Dec 13 07:12:55 crc kubenswrapper[4644]: I1213 07:12:55.945084 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6z4jl"
Dec 13 07:12:55 crc kubenswrapper[4644]: I1213 07:12:55.951659 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6z4jl"]
Dec 13 07:12:55 crc kubenswrapper[4644]: I1213 07:12:55.995575 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c060a29-f196-464c-a1c7-56ef8805e4f1-catalog-content\") pod \"certified-operators-6z4jl\" (UID: \"4c060a29-f196-464c-a1c7-56ef8805e4f1\") " pod="openshift-marketplace/certified-operators-6z4jl"
Dec 13 07:12:55 crc kubenswrapper[4644]: I1213 07:12:55.995759 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xp49z\" (UniqueName: \"kubernetes.io/projected/4c060a29-f196-464c-a1c7-56ef8805e4f1-kube-api-access-xp49z\") pod \"certified-operators-6z4jl\" (UID: \"4c060a29-f196-464c-a1c7-56ef8805e4f1\") " pod="openshift-marketplace/certified-operators-6z4jl"
Dec 13 07:12:55 crc kubenswrapper[4644]: I1213 07:12:55.995782 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c060a29-f196-464c-a1c7-56ef8805e4f1-utilities\") pod \"certified-operators-6z4jl\" (UID: \"4c060a29-f196-464c-a1c7-56ef8805e4f1\") " pod="openshift-marketplace/certified-operators-6z4jl"
Dec 13 07:12:56 crc kubenswrapper[4644]: I1213 07:12:56.097732 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c060a29-f196-464c-a1c7-56ef8805e4f1-catalog-content\") pod \"certified-operators-6z4jl\" (UID: \"4c060a29-f196-464c-a1c7-56ef8805e4f1\") " pod="openshift-marketplace/certified-operators-6z4jl"
pod="openshift-marketplace/certified-operators-6z4jl" Dec 13 07:12:56 crc kubenswrapper[4644]: I1213 07:12:56.098017 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c060a29-f196-464c-a1c7-56ef8805e4f1-utilities\") pod \"certified-operators-6z4jl\" (UID: \"4c060a29-f196-464c-a1c7-56ef8805e4f1\") " pod="openshift-marketplace/certified-operators-6z4jl" Dec 13 07:12:56 crc kubenswrapper[4644]: I1213 07:12:56.098206 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c060a29-f196-464c-a1c7-56ef8805e4f1-catalog-content\") pod \"certified-operators-6z4jl\" (UID: \"4c060a29-f196-464c-a1c7-56ef8805e4f1\") " pod="openshift-marketplace/certified-operators-6z4jl" Dec 13 07:12:56 crc kubenswrapper[4644]: I1213 07:12:56.098373 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c060a29-f196-464c-a1c7-56ef8805e4f1-utilities\") pod \"certified-operators-6z4jl\" (UID: \"4c060a29-f196-464c-a1c7-56ef8805e4f1\") " pod="openshift-marketplace/certified-operators-6z4jl" Dec 13 07:12:56 crc kubenswrapper[4644]: I1213 07:12:56.117986 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xp49z\" (UniqueName: \"kubernetes.io/projected/4c060a29-f196-464c-a1c7-56ef8805e4f1-kube-api-access-xp49z\") pod \"certified-operators-6z4jl\" (UID: \"4c060a29-f196-464c-a1c7-56ef8805e4f1\") " pod="openshift-marketplace/certified-operators-6z4jl" Dec 13 07:12:56 crc kubenswrapper[4644]: I1213 07:12:56.265139 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6z4jl" Dec 13 07:12:56 crc kubenswrapper[4644]: I1213 07:12:56.389747 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:12:56 crc kubenswrapper[4644]: E1213 07:12:56.390530 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:12:56 crc kubenswrapper[4644]: I1213 07:12:56.689715 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6z4jl"] Dec 13 07:12:57 crc kubenswrapper[4644]: I1213 07:12:57.587927 4644 generic.go:334] "Generic (PLEG): container finished" podID="4c060a29-f196-464c-a1c7-56ef8805e4f1" containerID="67bfe296ea3fcb57ced905eb08378d6b61ff4b86e8a955fa68ab58103b87d2af" exitCode=0 Dec 13 07:12:57 crc kubenswrapper[4644]: I1213 07:12:57.588039 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6z4jl" event={"ID":"4c060a29-f196-464c-a1c7-56ef8805e4f1","Type":"ContainerDied","Data":"67bfe296ea3fcb57ced905eb08378d6b61ff4b86e8a955fa68ab58103b87d2af"} Dec 13 07:12:57 crc kubenswrapper[4644]: I1213 07:12:57.588246 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6z4jl" event={"ID":"4c060a29-f196-464c-a1c7-56ef8805e4f1","Type":"ContainerStarted","Data":"17a8fbf6fbd3f0e0f0c91f0ae7ab3c71767d109296533408fcbaef5ea648a241"} Dec 13 
Dec 13 07:12:58 crc kubenswrapper[4644]: I1213 07:12:58.599234 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6z4jl" event={"ID":"4c060a29-f196-464c-a1c7-56ef8805e4f1","Type":"ContainerStarted","Data":"223f153d3911d276245c805fe9990d238915eb947aa67ef6d57b4be2df932e47"}
Dec 13 07:12:59 crc kubenswrapper[4644]: I1213 07:12:59.035777 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-d6gts"]
Dec 13 07:12:59 crc kubenswrapper[4644]: I1213 07:12:59.044543 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-4267-account-create-update-rmqmq"]
Dec 13 07:12:59 crc kubenswrapper[4644]: I1213 07:12:59.051543 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-6797-account-create-update-4lnzn"]
Dec 13 07:12:59 crc kubenswrapper[4644]: I1213 07:12:59.059550 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-d6gts"]
Dec 13 07:12:59 crc kubenswrapper[4644]: I1213 07:12:59.066427 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-93c6-account-create-update-pn2qf"]
Dec 13 07:12:59 crc kubenswrapper[4644]: I1213 07:12:59.072632 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-krrzw"]
Dec 13 07:12:59 crc kubenswrapper[4644]: I1213 07:12:59.078491 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-kwtnr"]
Dec 13 07:12:59 crc kubenswrapper[4644]: I1213 07:12:59.083658 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-4267-account-create-update-rmqmq"]
Dec 13 07:12:59 crc kubenswrapper[4644]: I1213 07:12:59.088357 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-kwtnr"]
Dec 13 07:12:59 crc kubenswrapper[4644]: I1213 07:12:59.092928 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-93c6-account-create-update-pn2qf"]
Dec 13 07:12:59 crc kubenswrapper[4644]: I1213 07:12:59.097373 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-6797-account-create-update-4lnzn"]
Dec 13 07:12:59 crc kubenswrapper[4644]: I1213 07:12:59.106658 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-krrzw"]
Dec 13 07:12:59 crc kubenswrapper[4644]: I1213 07:12:59.607300 4644 generic.go:334] "Generic (PLEG): container finished" podID="4c060a29-f196-464c-a1c7-56ef8805e4f1" containerID="223f153d3911d276245c805fe9990d238915eb947aa67ef6d57b4be2df932e47" exitCode=0
Dec 13 07:12:59 crc kubenswrapper[4644]: I1213 07:12:59.607345 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6z4jl" event={"ID":"4c060a29-f196-464c-a1c7-56ef8805e4f1","Type":"ContainerDied","Data":"223f153d3911d276245c805fe9990d238915eb947aa67ef6d57b4be2df932e47"}
Dec 13 07:13:00 crc kubenswrapper[4644]: I1213 07:13:00.398292 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34cd0e67-a753-4a1b-83c5-00f478cb5dff" path="/var/lib/kubelet/pods/34cd0e67-a753-4a1b-83c5-00f478cb5dff/volumes"
Dec 13 07:13:00 crc kubenswrapper[4644]: I1213 07:13:00.399069 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4" path="/var/lib/kubelet/pods/5f039a4e-1c0f-49a6-bd81-3ce67dc1a6b4/volumes"
volumes dir" podUID="6d25b79c-9651-4e87-acd1-85b3d955cafa" path="/var/lib/kubelet/pods/6d25b79c-9651-4e87-acd1-85b3d955cafa/volumes" Dec 13 07:13:00 crc kubenswrapper[4644]: I1213 07:13:00.400140 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd65e318-075f-4c1c-9b0c-9a3ada36a63e" path="/var/lib/kubelet/pods/cd65e318-075f-4c1c-9b0c-9a3ada36a63e/volumes" Dec 13 07:13:00 crc kubenswrapper[4644]: I1213 07:13:00.401146 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e60af70b-0981-447f-8697-ac2689821fb8" path="/var/lib/kubelet/pods/e60af70b-0981-447f-8697-ac2689821fb8/volumes" Dec 13 07:13:00 crc kubenswrapper[4644]: I1213 07:13:00.401653 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd0c3333-bd69-477d-9989-1c614cf6c2c6" path="/var/lib/kubelet/pods/fd0c3333-bd69-477d-9989-1c614cf6c2c6/volumes" Dec 13 07:13:00 crc kubenswrapper[4644]: I1213 07:13:00.616311 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6z4jl" event={"ID":"4c060a29-f196-464c-a1c7-56ef8805e4f1","Type":"ContainerStarted","Data":"e26369003746c915778802605a3c00dfaee450a2d2a7ed97f822770bad71e989"} Dec 13 07:13:00 crc kubenswrapper[4644]: I1213 07:13:00.632635 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6z4jl" podStartSLOduration=3.061763739 podStartE2EDuration="5.632617537s" podCreationTimestamp="2025-12-13 07:12:55 +0000 UTC" firstStartedPulling="2025-12-13 07:12:57.589805016 +0000 UTC m=+1639.804755850" lastFinishedPulling="2025-12-13 07:13:00.160658815 +0000 UTC m=+1642.375609648" observedRunningTime="2025-12-13 07:13:00.632072112 +0000 UTC m=+1642.847022944" watchObservedRunningTime="2025-12-13 07:13:00.632617537 +0000 UTC m=+1642.847568369" Dec 13 07:13:06 crc kubenswrapper[4644]: I1213 07:13:06.265919 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6z4jl" Dec 13 07:13:06 crc kubenswrapper[4644]: I1213 07:13:06.266401 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6z4jl" Dec 13 07:13:06 crc kubenswrapper[4644]: I1213 07:13:06.297996 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6z4jl" Dec 13 07:13:06 crc kubenswrapper[4644]: I1213 07:13:06.692294 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6z4jl" Dec 13 07:13:06 crc kubenswrapper[4644]: I1213 07:13:06.735041 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6z4jl"] Dec 13 07:13:07 crc kubenswrapper[4644]: I1213 07:13:07.389791 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:13:07 crc kubenswrapper[4644]: E1213 07:13:07.390274 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:13:08 crc kubenswrapper[4644]: I1213 07:13:08.669797 4644 kuberuntime_container.go:808] "Killing 
Dec 13 07:13:08 crc kubenswrapper[4644]: I1213 07:13:08.669797 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6z4jl" podUID="4c060a29-f196-464c-a1c7-56ef8805e4f1" containerName="registry-server" containerID="cri-o://e26369003746c915778802605a3c00dfaee450a2d2a7ed97f822770bad71e989" gracePeriod=2
Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.037277 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6z4jl"
Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.157371 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c060a29-f196-464c-a1c7-56ef8805e4f1-catalog-content\") pod \"4c060a29-f196-464c-a1c7-56ef8805e4f1\" (UID: \"4c060a29-f196-464c-a1c7-56ef8805e4f1\") "
Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.157480 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c060a29-f196-464c-a1c7-56ef8805e4f1-utilities\") pod \"4c060a29-f196-464c-a1c7-56ef8805e4f1\" (UID: \"4c060a29-f196-464c-a1c7-56ef8805e4f1\") "
Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.157551 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xp49z\" (UniqueName: \"kubernetes.io/projected/4c060a29-f196-464c-a1c7-56ef8805e4f1-kube-api-access-xp49z\") pod \"4c060a29-f196-464c-a1c7-56ef8805e4f1\" (UID: \"4c060a29-f196-464c-a1c7-56ef8805e4f1\") "
Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.158229 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c060a29-f196-464c-a1c7-56ef8805e4f1-utilities" (OuterVolumeSpecName: "utilities") pod "4c060a29-f196-464c-a1c7-56ef8805e4f1" (UID: "4c060a29-f196-464c-a1c7-56ef8805e4f1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.174202 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c060a29-f196-464c-a1c7-56ef8805e4f1-kube-api-access-xp49z" (OuterVolumeSpecName: "kube-api-access-xp49z") pod "4c060a29-f196-464c-a1c7-56ef8805e4f1" (UID: "4c060a29-f196-464c-a1c7-56ef8805e4f1"). InnerVolumeSpecName "kube-api-access-xp49z". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.260419 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c060a29-f196-464c-a1c7-56ef8805e4f1-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.260461 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xp49z\" (UniqueName: \"kubernetes.io/projected/4c060a29-f196-464c-a1c7-56ef8805e4f1-kube-api-access-xp49z\") on node \"crc\" DevicePath \"\"" Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.260473 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c060a29-f196-464c-a1c7-56ef8805e4f1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.678395 4644 generic.go:334] "Generic (PLEG): container finished" podID="4c060a29-f196-464c-a1c7-56ef8805e4f1" containerID="e26369003746c915778802605a3c00dfaee450a2d2a7ed97f822770bad71e989" exitCode=0 Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.678469 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6z4jl" Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.678473 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6z4jl" event={"ID":"4c060a29-f196-464c-a1c7-56ef8805e4f1","Type":"ContainerDied","Data":"e26369003746c915778802605a3c00dfaee450a2d2a7ed97f822770bad71e989"} Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.679572 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6z4jl" event={"ID":"4c060a29-f196-464c-a1c7-56ef8805e4f1","Type":"ContainerDied","Data":"17a8fbf6fbd3f0e0f0c91f0ae7ab3c71767d109296533408fcbaef5ea648a241"} Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.679597 4644 scope.go:117] "RemoveContainer" containerID="e26369003746c915778802605a3c00dfaee450a2d2a7ed97f822770bad71e989" Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.696554 4644 scope.go:117] "RemoveContainer" containerID="223f153d3911d276245c805fe9990d238915eb947aa67ef6d57b4be2df932e47" Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.705460 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6z4jl"] Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.711229 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6z4jl"] Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.727190 4644 scope.go:117] "RemoveContainer" containerID="67bfe296ea3fcb57ced905eb08378d6b61ff4b86e8a955fa68ab58103b87d2af" Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.755073 4644 scope.go:117] "RemoveContainer" containerID="e26369003746c915778802605a3c00dfaee450a2d2a7ed97f822770bad71e989" Dec 13 07:13:09 crc kubenswrapper[4644]: E1213 07:13:09.755835 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e26369003746c915778802605a3c00dfaee450a2d2a7ed97f822770bad71e989\": container with ID starting with e26369003746c915778802605a3c00dfaee450a2d2a7ed97f822770bad71e989 not found: ID does not exist" containerID="e26369003746c915778802605a3c00dfaee450a2d2a7ed97f822770bad71e989" Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.755950 
Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.755950 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e26369003746c915778802605a3c00dfaee450a2d2a7ed97f822770bad71e989"} err="failed to get container status \"e26369003746c915778802605a3c00dfaee450a2d2a7ed97f822770bad71e989\": rpc error: code = NotFound desc = could not find container \"e26369003746c915778802605a3c00dfaee450a2d2a7ed97f822770bad71e989\": container with ID starting with e26369003746c915778802605a3c00dfaee450a2d2a7ed97f822770bad71e989 not found: ID does not exist"
Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.755982 4644 scope.go:117] "RemoveContainer" containerID="223f153d3911d276245c805fe9990d238915eb947aa67ef6d57b4be2df932e47"
Dec 13 07:13:09 crc kubenswrapper[4644]: E1213 07:13:09.757647 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"223f153d3911d276245c805fe9990d238915eb947aa67ef6d57b4be2df932e47\": container with ID starting with 223f153d3911d276245c805fe9990d238915eb947aa67ef6d57b4be2df932e47 not found: ID does not exist" containerID="223f153d3911d276245c805fe9990d238915eb947aa67ef6d57b4be2df932e47"
Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.757788 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"223f153d3911d276245c805fe9990d238915eb947aa67ef6d57b4be2df932e47"} err="failed to get container status \"223f153d3911d276245c805fe9990d238915eb947aa67ef6d57b4be2df932e47\": rpc error: code = NotFound desc = could not find container \"223f153d3911d276245c805fe9990d238915eb947aa67ef6d57b4be2df932e47\": container with ID starting with 223f153d3911d276245c805fe9990d238915eb947aa67ef6d57b4be2df932e47 not found: ID does not exist"
Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.757884 4644 scope.go:117] "RemoveContainer" containerID="67bfe296ea3fcb57ced905eb08378d6b61ff4b86e8a955fa68ab58103b87d2af"
Dec 13 07:13:09 crc kubenswrapper[4644]: E1213 07:13:09.758339 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67bfe296ea3fcb57ced905eb08378d6b61ff4b86e8a955fa68ab58103b87d2af\": container with ID starting with 67bfe296ea3fcb57ced905eb08378d6b61ff4b86e8a955fa68ab58103b87d2af not found: ID does not exist" containerID="67bfe296ea3fcb57ced905eb08378d6b61ff4b86e8a955fa68ab58103b87d2af"
Dec 13 07:13:09 crc kubenswrapper[4644]: I1213 07:13:09.758365 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67bfe296ea3fcb57ced905eb08378d6b61ff4b86e8a955fa68ab58103b87d2af"} err="failed to get container status \"67bfe296ea3fcb57ced905eb08378d6b61ff4b86e8a955fa68ab58103b87d2af\": rpc error: code = NotFound desc = could not find container \"67bfe296ea3fcb57ced905eb08378d6b61ff4b86e8a955fa68ab58103b87d2af\": container with ID starting with 67bfe296ea3fcb57ced905eb08378d6b61ff4b86e8a955fa68ab58103b87d2af not found: ID does not exist"
Dec 13 07:13:10 crc kubenswrapper[4644]: I1213 07:13:10.397742 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c060a29-f196-464c-a1c7-56ef8805e4f1" path="/var/lib/kubelet/pods/4c060a29-f196-464c-a1c7-56ef8805e4f1/volumes"
Dec 13 07:13:15 crc kubenswrapper[4644]: I1213 07:13:15.031388 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-mkjbp"]
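NOTE: the NotFound errors above are benign double-deletes: those containers were already gone (removed along with the pod sandbox) by the time the follow-up DeleteContainer ran, so the status lookup finds nothing and the kubelet simply logs and moves on. The usual client-side pattern is to treat NotFound as success; a sketch over a gRPC-style error (the removeContainer helper is hypothetical):

    package main

    import (
    	"fmt"

    	"google.golang.org/grpc/codes"
    	"google.golang.org/grpc/status"
    )

    // deleteIgnoreNotFound treats "already gone" as success, which is why
    // the log's NotFound errors are harmless. remove is any CRI-style call.
    func deleteIgnoreNotFound(remove func(id string) error, id string) error {
    	if err := remove(id); err != nil && status.Code(err) != codes.NotFound {
    		return err
    	}
    	return nil
    }

    func main() {
    	gone := func(id string) error {
    		return status.Errorf(codes.NotFound, "could not find container %q", id)
    	}
    	fmt.Println(deleteIgnoreNotFound(gone, "e26369")) // <nil>
    }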
pods=["openstack/nova-cell0-conductor-db-sync-mkjbp"] Dec 13 07:13:16 crc kubenswrapper[4644]: I1213 07:13:16.397778 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="549d3d46-a7b0-4b33-b991-80b4eab06548" path="/var/lib/kubelet/pods/549d3d46-a7b0-4b33-b991-80b4eab06548/volumes" Dec 13 07:13:18 crc kubenswrapper[4644]: I1213 07:13:18.394716 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:13:18 crc kubenswrapper[4644]: E1213 07:13:18.395261 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:13:29 crc kubenswrapper[4644]: I1213 07:13:29.026034 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nq9b5"] Dec 13 07:13:29 crc kubenswrapper[4644]: I1213 07:13:29.036572 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nq9b5"] Dec 13 07:13:30 crc kubenswrapper[4644]: I1213 07:13:30.400016 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d850ca3e-58f1-4035-80b5-1799182101dc" path="/var/lib/kubelet/pods/d850ca3e-58f1-4035-80b5-1799182101dc/volumes" Dec 13 07:13:31 crc kubenswrapper[4644]: I1213 07:13:31.028995 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-z4btn"] Dec 13 07:13:31 crc kubenswrapper[4644]: I1213 07:13:31.034800 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-z4btn"] Dec 13 07:13:31 crc kubenswrapper[4644]: I1213 07:13:31.389080 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:13:31 crc kubenswrapper[4644]: E1213 07:13:31.389596 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:13:32 crc kubenswrapper[4644]: I1213 07:13:32.397505 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c88cb8b-6880-4be0-a4bc-467783f0f752" path="/var/lib/kubelet/pods/5c88cb8b-6880-4be0-a4bc-467783f0f752/volumes" Dec 13 07:13:42 crc kubenswrapper[4644]: I1213 07:13:42.159632 4644 scope.go:117] "RemoveContainer" containerID="96e2ba3e1b31d09d264c0684115656ca62ff25162b36332a850a72184ebf8bea" Dec 13 07:13:42 crc kubenswrapper[4644]: I1213 07:13:42.202715 4644 scope.go:117] "RemoveContainer" containerID="003dbdc46051909f9b871c9826b2c83493e678e380a26c43b98e5181f61f892b" Dec 13 07:13:42 crc kubenswrapper[4644]: I1213 07:13:42.224351 4644 scope.go:117] "RemoveContainer" containerID="0534f0d864eab5a9c9bf5a3bca7456b016acf421f62786703aae5b42c371c224" Dec 13 07:13:42 crc kubenswrapper[4644]: I1213 07:13:42.272865 4644 scope.go:117] "RemoveContainer" containerID="aed194971b5f1981fff2f7009769d2e0e792f4fe8c6a9aedee65a285b52359b1" Dec 13 07:13:42 crc 
Dec 13 07:13:42 crc kubenswrapper[4644]: I1213 07:13:42.289325 4644 scope.go:117] "RemoveContainer" containerID="d37925dc19bf2c8bbb92be40c4d3ea0031e1ae8f733bbfefbc51540c753094f1"
Dec 13 07:13:42 crc kubenswrapper[4644]: I1213 07:13:42.320346 4644 scope.go:117] "RemoveContainer" containerID="54e5eac74868b80de3e3c799568b678cb8539c7c649b20365117a19da7bed4b3"
Dec 13 07:13:42 crc kubenswrapper[4644]: I1213 07:13:42.369113 4644 scope.go:117] "RemoveContainer" containerID="0b1e6f107ee7d0752b8918402b7db77af6a4d08a78d67944df42e807018a60e6"
Dec 13 07:13:42 crc kubenswrapper[4644]: I1213 07:13:42.386153 4644 scope.go:117] "RemoveContainer" containerID="6623c4fa9a070596fa9c295a06a77b1941eaf169d0b1d179dcb300b468b3d65e"
Dec 13 07:13:42 crc kubenswrapper[4644]: I1213 07:13:42.405951 4644 scope.go:117] "RemoveContainer" containerID="bec4b4e36b329fd7f5534c46ed9a6ef0cd00af58ec9aed18af0295ffe275c722"
Dec 13 07:13:45 crc kubenswrapper[4644]: I1213 07:13:45.389288 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130"
Dec 13 07:13:45 crc kubenswrapper[4644]: E1213 07:13:45.390026 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:13:56 crc kubenswrapper[4644]: I1213 07:13:56.389913 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130"
Dec 13 07:13:56 crc kubenswrapper[4644]: E1213 07:13:56.390686 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:14:08 crc kubenswrapper[4644]: I1213 07:14:08.414538 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130"
Dec 13 07:14:08 crc kubenswrapper[4644]: E1213 07:14:08.415739 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:14:16 crc kubenswrapper[4644]: I1213 07:14:16.035161 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-4h7xs"]
Dec 13 07:14:16 crc kubenswrapper[4644]: I1213 07:14:16.042782 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-4h7xs"]
Dec 13 07:14:16 crc kubenswrapper[4644]: I1213 07:14:16.398524 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440" path="/var/lib/kubelet/pods/f3e0cf20-65fd-4d5e-b1ae-a8c51f29b440/volumes"
"RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:14:22 crc kubenswrapper[4644]: E1213 07:14:22.390663 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:14:37 crc kubenswrapper[4644]: I1213 07:14:37.390325 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:14:37 crc kubenswrapper[4644]: E1213 07:14:37.391201 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:14:42 crc kubenswrapper[4644]: I1213 07:14:42.542541 4644 scope.go:117] "RemoveContainer" containerID="d3474a3cb232f3aa7fc1ca89462ddac691abf6f598caeda5f214b774725cd68c" Dec 13 07:14:51 crc kubenswrapper[4644]: I1213 07:14:51.389791 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:14:51 crc kubenswrapper[4644]: E1213 07:14:51.390759 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.144274 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth"] Dec 13 07:15:00 crc kubenswrapper[4644]: E1213 07:15:00.145145 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c060a29-f196-464c-a1c7-56ef8805e4f1" containerName="extract-utilities" Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.145162 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c060a29-f196-464c-a1c7-56ef8805e4f1" containerName="extract-utilities" Dec 13 07:15:00 crc kubenswrapper[4644]: E1213 07:15:00.145190 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c060a29-f196-464c-a1c7-56ef8805e4f1" containerName="extract-content" Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.145196 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c060a29-f196-464c-a1c7-56ef8805e4f1" containerName="extract-content" Dec 13 07:15:00 crc kubenswrapper[4644]: E1213 07:15:00.145208 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c060a29-f196-464c-a1c7-56ef8805e4f1" containerName="registry-server" Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.145213 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c060a29-f196-464c-a1c7-56ef8805e4f1" containerName="registry-server" Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 
Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.145395 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c060a29-f196-464c-a1c7-56ef8805e4f1" containerName="registry-server"
Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.146052 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth"
Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.150023 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.150189 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.157763 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth"]
Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.268736 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpj8v\" (UniqueName: \"kubernetes.io/projected/89b0b685-9991-4bee-986c-2eba60639e34-kube-api-access-dpj8v\") pod \"collect-profiles-29426835-9hdth\" (UID: \"89b0b685-9991-4bee-986c-2eba60639e34\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth"
Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.269059 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89b0b685-9991-4bee-986c-2eba60639e34-secret-volume\") pod \"collect-profiles-29426835-9hdth\" (UID: \"89b0b685-9991-4bee-986c-2eba60639e34\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth"
Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.269336 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89b0b685-9991-4bee-986c-2eba60639e34-config-volume\") pod \"collect-profiles-29426835-9hdth\" (UID: \"89b0b685-9991-4bee-986c-2eba60639e34\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth"
Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.372627 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89b0b685-9991-4bee-986c-2eba60639e34-secret-volume\") pod \"collect-profiles-29426835-9hdth\" (UID: \"89b0b685-9991-4bee-986c-2eba60639e34\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth"
Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.372946 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89b0b685-9991-4bee-986c-2eba60639e34-config-volume\") pod \"collect-profiles-29426835-9hdth\" (UID: \"89b0b685-9991-4bee-986c-2eba60639e34\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth"
pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth" Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.374010 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89b0b685-9991-4bee-986c-2eba60639e34-config-volume\") pod \"collect-profiles-29426835-9hdth\" (UID: \"89b0b685-9991-4bee-986c-2eba60639e34\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth" Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.381311 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89b0b685-9991-4bee-986c-2eba60639e34-secret-volume\") pod \"collect-profiles-29426835-9hdth\" (UID: \"89b0b685-9991-4bee-986c-2eba60639e34\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth" Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.393292 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpj8v\" (UniqueName: \"kubernetes.io/projected/89b0b685-9991-4bee-986c-2eba60639e34-kube-api-access-dpj8v\") pod \"collect-profiles-29426835-9hdth\" (UID: \"89b0b685-9991-4bee-986c-2eba60639e34\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth" Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.463496 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth" Dec 13 07:15:00 crc kubenswrapper[4644]: I1213 07:15:00.847304 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth"] Dec 13 07:15:01 crc kubenswrapper[4644]: I1213 07:15:01.477350 4644 generic.go:334] "Generic (PLEG): container finished" podID="89b0b685-9991-4bee-986c-2eba60639e34" containerID="48e913b1fdc997232850652921090f0820cdc1839d2fd395fad54ced249762cd" exitCode=0 Dec 13 07:15:01 crc kubenswrapper[4644]: I1213 07:15:01.477408 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth" event={"ID":"89b0b685-9991-4bee-986c-2eba60639e34","Type":"ContainerDied","Data":"48e913b1fdc997232850652921090f0820cdc1839d2fd395fad54ced249762cd"} Dec 13 07:15:01 crc kubenswrapper[4644]: I1213 07:15:01.477738 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth" event={"ID":"89b0b685-9991-4bee-986c-2eba60639e34","Type":"ContainerStarted","Data":"ea42e1c2006585fedd40c8e77e9c6f40cf9b73aee7af5eea7a0d383e104dad24"} Dec 13 07:15:02 crc kubenswrapper[4644]: I1213 07:15:02.710324 4644 util.go:48] "No ready sandbox for pod can be found. 
Dec 13 07:15:02 crc kubenswrapper[4644]: I1213 07:15:02.710324 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth"
Dec 13 07:15:02 crc kubenswrapper[4644]: I1213 07:15:02.823789 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89b0b685-9991-4bee-986c-2eba60639e34-config-volume\") pod \"89b0b685-9991-4bee-986c-2eba60639e34\" (UID: \"89b0b685-9991-4bee-986c-2eba60639e34\") "
Dec 13 07:15:02 crc kubenswrapper[4644]: I1213 07:15:02.823861 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpj8v\" (UniqueName: \"kubernetes.io/projected/89b0b685-9991-4bee-986c-2eba60639e34-kube-api-access-dpj8v\") pod \"89b0b685-9991-4bee-986c-2eba60639e34\" (UID: \"89b0b685-9991-4bee-986c-2eba60639e34\") "
Dec 13 07:15:02 crc kubenswrapper[4644]: I1213 07:15:02.823958 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89b0b685-9991-4bee-986c-2eba60639e34-secret-volume\") pod \"89b0b685-9991-4bee-986c-2eba60639e34\" (UID: \"89b0b685-9991-4bee-986c-2eba60639e34\") "
Dec 13 07:15:02 crc kubenswrapper[4644]: I1213 07:15:02.824833 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89b0b685-9991-4bee-986c-2eba60639e34-config-volume" (OuterVolumeSpecName: "config-volume") pod "89b0b685-9991-4bee-986c-2eba60639e34" (UID: "89b0b685-9991-4bee-986c-2eba60639e34"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 13 07:15:02 crc kubenswrapper[4644]: I1213 07:15:02.829258 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89b0b685-9991-4bee-986c-2eba60639e34-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "89b0b685-9991-4bee-986c-2eba60639e34" (UID: "89b0b685-9991-4bee-986c-2eba60639e34"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:15:02 crc kubenswrapper[4644]: I1213 07:15:02.829337 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89b0b685-9991-4bee-986c-2eba60639e34-kube-api-access-dpj8v" (OuterVolumeSpecName: "kube-api-access-dpj8v") pod "89b0b685-9991-4bee-986c-2eba60639e34" (UID: "89b0b685-9991-4bee-986c-2eba60639e34"). InnerVolumeSpecName "kube-api-access-dpj8v". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:15:02 crc kubenswrapper[4644]: I1213 07:15:02.926003 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpj8v\" (UniqueName: \"kubernetes.io/projected/89b0b685-9991-4bee-986c-2eba60639e34-kube-api-access-dpj8v\") on node \"crc\" DevicePath \"\"" Dec 13 07:15:02 crc kubenswrapper[4644]: I1213 07:15:02.926037 4644 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89b0b685-9991-4bee-986c-2eba60639e34-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 13 07:15:02 crc kubenswrapper[4644]: I1213 07:15:02.926048 4644 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89b0b685-9991-4bee-986c-2eba60639e34-config-volume\") on node \"crc\" DevicePath \"\"" Dec 13 07:15:03 crc kubenswrapper[4644]: I1213 07:15:03.497921 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth" event={"ID":"89b0b685-9991-4bee-986c-2eba60639e34","Type":"ContainerDied","Data":"ea42e1c2006585fedd40c8e77e9c6f40cf9b73aee7af5eea7a0d383e104dad24"} Dec 13 07:15:03 crc kubenswrapper[4644]: I1213 07:15:03.497982 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-9hdth" Dec 13 07:15:03 crc kubenswrapper[4644]: I1213 07:15:03.498010 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea42e1c2006585fedd40c8e77e9c6f40cf9b73aee7af5eea7a0d383e104dad24" Dec 13 07:15:05 crc kubenswrapper[4644]: I1213 07:15:05.389526 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:15:05 crc kubenswrapper[4644]: E1213 07:15:05.390118 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:15:17 crc kubenswrapper[4644]: I1213 07:15:17.391278 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:15:17 crc kubenswrapper[4644]: E1213 07:15:17.392578 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:15:32 crc kubenswrapper[4644]: I1213 07:15:32.389549 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:15:32 crc kubenswrapper[4644]: E1213 07:15:32.390191 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:15:44 crc kubenswrapper[4644]: I1213 07:15:44.389856 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:15:44 crc kubenswrapper[4644]: E1213 07:15:44.390847 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.466912 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.472565 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.477844 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.482291 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.487219 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.491734 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-6g9cs"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.496213 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.500495 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.505069 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6zn7l"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.509416 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-jcj4x"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.513890 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-88tm2"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.519846 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-sv6z4"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.526310 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nd86n"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.532152 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dpspr"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.538354 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/ssh-known-hosts-edpm-deployment-6g9cs"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.543458 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.547778 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wrrrw"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.552139 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-ph8mz"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.571574 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t"] Dec 13 07:15:47 crc kubenswrapper[4644]: I1213 07:15:47.578039 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kzk4t"] Dec 13 07:15:48 crc kubenswrapper[4644]: I1213 07:15:48.398267 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02272783-8f74-4b52-9518-cb97dcf8205b" path="/var/lib/kubelet/pods/02272783-8f74-4b52-9518-cb97dcf8205b/volumes" Dec 13 07:15:48 crc kubenswrapper[4644]: I1213 07:15:48.399168 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b444632-d889-418a-83b5-2ce9234107a5" path="/var/lib/kubelet/pods/1b444632-d889-418a-83b5-2ce9234107a5/volumes" Dec 13 07:15:48 crc kubenswrapper[4644]: I1213 07:15:48.399659 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34b9af82-a9c7-4c74-a36c-4205d3ff4427" path="/var/lib/kubelet/pods/34b9af82-a9c7-4c74-a36c-4205d3ff4427/volumes" Dec 13 07:15:48 crc kubenswrapper[4644]: I1213 07:15:48.400119 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59121abb-74f1-43fb-ba78-29bf6cdb871e" path="/var/lib/kubelet/pods/59121abb-74f1-43fb-ba78-29bf6cdb871e/volumes" Dec 13 07:15:48 crc kubenswrapper[4644]: I1213 07:15:48.401069 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63da52dc-29de-43e7-b967-a687cff1c918" path="/var/lib/kubelet/pods/63da52dc-29de-43e7-b967-a687cff1c918/volumes" Dec 13 07:15:48 crc kubenswrapper[4644]: I1213 07:15:48.401543 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d41e3c7-46ae-442b-a11c-4ddedb2c6398" path="/var/lib/kubelet/pods/7d41e3c7-46ae-442b-a11c-4ddedb2c6398/volumes" Dec 13 07:15:48 crc kubenswrapper[4644]: I1213 07:15:48.401983 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bf97ceb-19ea-4495-9d8a-5dc150074bd5" path="/var/lib/kubelet/pods/8bf97ceb-19ea-4495-9d8a-5dc150074bd5/volumes" Dec 13 07:15:48 crc kubenswrapper[4644]: I1213 07:15:48.402899 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c87e60d6-1b2d-47eb-99ae-543b7b454813" path="/var/lib/kubelet/pods/c87e60d6-1b2d-47eb-99ae-543b7b454813/volumes" Dec 13 07:15:48 crc kubenswrapper[4644]: I1213 07:15:48.403363 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d241dfce-7e9d-4f7b-9869-aa23e3d3569f" path="/var/lib/kubelet/pods/d241dfce-7e9d-4f7b-9869-aa23e3d3569f/volumes" Dec 13 07:15:48 crc kubenswrapper[4644]: I1213 07:15:48.403908 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe9da418-f306-4918-a4f5-9c8b0b9ebfe4" path="/var/lib/kubelet/pods/fe9da418-f306-4918-a4f5-9c8b0b9ebfe4/volumes" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 
07:15:52.468512 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp"] Dec 13 07:15:52 crc kubenswrapper[4644]: E1213 07:15:52.469068 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89b0b685-9991-4bee-986c-2eba60639e34" containerName="collect-profiles" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.469082 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="89b0b685-9991-4bee-986c-2eba60639e34" containerName="collect-profiles" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.469244 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="89b0b685-9991-4bee-986c-2eba60639e34" containerName="collect-profiles" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.469820 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.474368 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.474738 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.474926 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.474934 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.475288 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.483459 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp"] Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.584891 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wq8qh\" (UniqueName: \"kubernetes.io/projected/446c5329-ac75-4c95-a98a-a18ce659ebcd-kube-api-access-wq8qh\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.585306 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.585668 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.585842 4644 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.586320 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.688605 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.688656 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.688681 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wq8qh\" (UniqueName: \"kubernetes.io/projected/446c5329-ac75-4c95-a98a-a18ce659ebcd-kube-api-access-wq8qh\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.688777 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.688850 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.694193 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.694199 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.694379 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.702144 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.702506 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wq8qh\" (UniqueName: \"kubernetes.io/projected/446c5329-ac75-4c95-a98a-a18ce659ebcd-kube-api-access-wq8qh\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:15:52 crc kubenswrapper[4644]: I1213 07:15:52.785246 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:15:53 crc kubenswrapper[4644]: I1213 07:15:53.228762 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp"] Dec 13 07:15:53 crc kubenswrapper[4644]: I1213 07:15:53.233223 4644 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 07:15:53 crc kubenswrapper[4644]: I1213 07:15:53.879255 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" event={"ID":"446c5329-ac75-4c95-a98a-a18ce659ebcd","Type":"ContainerStarted","Data":"db637b161e110c298a9822aa056c6fe074b45fe4ba13ecbecb351277ea221741"} Dec 13 07:15:54 crc kubenswrapper[4644]: I1213 07:15:54.887885 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" event={"ID":"446c5329-ac75-4c95-a98a-a18ce659ebcd","Type":"ContainerStarted","Data":"861239511bca5cb8625c5ab6817d5f6466ef847b204d0f69538f472dbcec6269"} Dec 13 07:15:54 crc kubenswrapper[4644]: I1213 07:15:54.902545 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" podStartSLOduration=2.190408196 podStartE2EDuration="2.902513799s" podCreationTimestamp="2025-12-13 07:15:52 +0000 UTC" firstStartedPulling="2025-12-13 07:15:53.232984218 +0000 UTC m=+1815.447935050" lastFinishedPulling="2025-12-13 07:15:53.945089821 +0000 UTC m=+1816.160040653" observedRunningTime="2025-12-13 07:15:54.898837106 +0000 UTC m=+1817.113787939" watchObservedRunningTime="2025-12-13 07:15:54.902513799 +0000 UTC m=+1817.117464631" Dec 13 07:15:58 crc kubenswrapper[4644]: I1213 07:15:58.393671 4644 scope.go:117] "RemoveContainer" 
containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:15:58 crc kubenswrapper[4644]: E1213 07:15:58.394474 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:16:09 crc kubenswrapper[4644]: I1213 07:16:09.015493 4644 generic.go:334] "Generic (PLEG): container finished" podID="446c5329-ac75-4c95-a98a-a18ce659ebcd" containerID="861239511bca5cb8625c5ab6817d5f6466ef847b204d0f69538f472dbcec6269" exitCode=2 Dec 13 07:16:09 crc kubenswrapper[4644]: I1213 07:16:09.015540 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" event={"ID":"446c5329-ac75-4c95-a98a-a18ce659ebcd","Type":"ContainerDied","Data":"861239511bca5cb8625c5ab6817d5f6466ef847b204d0f69538f472dbcec6269"} Dec 13 07:16:10 crc kubenswrapper[4644]: I1213 07:16:10.361725 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:16:10 crc kubenswrapper[4644]: I1213 07:16:10.391718 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:16:10 crc kubenswrapper[4644]: E1213 07:16:10.392076 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:16:10 crc kubenswrapper[4644]: I1213 07:16:10.460992 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wq8qh\" (UniqueName: \"kubernetes.io/projected/446c5329-ac75-4c95-a98a-a18ce659ebcd-kube-api-access-wq8qh\") pod \"446c5329-ac75-4c95-a98a-a18ce659ebcd\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " Dec 13 07:16:10 crc kubenswrapper[4644]: I1213 07:16:10.461041 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-ceph\") pod \"446c5329-ac75-4c95-a98a-a18ce659ebcd\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " Dec 13 07:16:10 crc kubenswrapper[4644]: I1213 07:16:10.461086 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-repo-setup-combined-ca-bundle\") pod \"446c5329-ac75-4c95-a98a-a18ce659ebcd\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " Dec 13 07:16:10 crc kubenswrapper[4644]: I1213 07:16:10.461155 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-inventory\") pod \"446c5329-ac75-4c95-a98a-a18ce659ebcd\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " Dec 13 07:16:10 crc kubenswrapper[4644]: I1213 07:16:10.461374 4644 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-ssh-key\") pod \"446c5329-ac75-4c95-a98a-a18ce659ebcd\" (UID: \"446c5329-ac75-4c95-a98a-a18ce659ebcd\") " Dec 13 07:16:10 crc kubenswrapper[4644]: I1213 07:16:10.469900 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "446c5329-ac75-4c95-a98a-a18ce659ebcd" (UID: "446c5329-ac75-4c95-a98a-a18ce659ebcd"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:16:10 crc kubenswrapper[4644]: I1213 07:16:10.470072 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-ceph" (OuterVolumeSpecName: "ceph") pod "446c5329-ac75-4c95-a98a-a18ce659ebcd" (UID: "446c5329-ac75-4c95-a98a-a18ce659ebcd"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:16:10 crc kubenswrapper[4644]: I1213 07:16:10.469939 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/446c5329-ac75-4c95-a98a-a18ce659ebcd-kube-api-access-wq8qh" (OuterVolumeSpecName: "kube-api-access-wq8qh") pod "446c5329-ac75-4c95-a98a-a18ce659ebcd" (UID: "446c5329-ac75-4c95-a98a-a18ce659ebcd"). InnerVolumeSpecName "kube-api-access-wq8qh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:16:10 crc kubenswrapper[4644]: I1213 07:16:10.484430 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "446c5329-ac75-4c95-a98a-a18ce659ebcd" (UID: "446c5329-ac75-4c95-a98a-a18ce659ebcd"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:16:10 crc kubenswrapper[4644]: I1213 07:16:10.485793 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-inventory" (OuterVolumeSpecName: "inventory") pod "446c5329-ac75-4c95-a98a-a18ce659ebcd" (UID: "446c5329-ac75-4c95-a98a-a18ce659ebcd"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:16:10 crc kubenswrapper[4644]: I1213 07:16:10.564100 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:16:10 crc kubenswrapper[4644]: I1213 07:16:10.564139 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wq8qh\" (UniqueName: \"kubernetes.io/projected/446c5329-ac75-4c95-a98a-a18ce659ebcd-kube-api-access-wq8qh\") on node \"crc\" DevicePath \"\"" Dec 13 07:16:10 crc kubenswrapper[4644]: I1213 07:16:10.564152 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:16:10 crc kubenswrapper[4644]: I1213 07:16:10.564163 4644 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:16:10 crc kubenswrapper[4644]: I1213 07:16:10.564176 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/446c5329-ac75-4c95-a98a-a18ce659ebcd-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:16:11 crc kubenswrapper[4644]: I1213 07:16:11.031797 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" event={"ID":"446c5329-ac75-4c95-a98a-a18ce659ebcd","Type":"ContainerDied","Data":"db637b161e110c298a9822aa056c6fe074b45fe4ba13ecbecb351277ea221741"} Dec 13 07:16:11 crc kubenswrapper[4644]: I1213 07:16:11.031845 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db637b161e110c298a9822aa056c6fe074b45fe4ba13ecbecb351277ea221741" Dec 13 07:16:11 crc kubenswrapper[4644]: I1213 07:16:11.031887 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.021375 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8"] Dec 13 07:16:18 crc kubenswrapper[4644]: E1213 07:16:18.022264 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="446c5329-ac75-4c95-a98a-a18ce659ebcd" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.022281 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="446c5329-ac75-4c95-a98a-a18ce659ebcd" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.022480 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="446c5329-ac75-4c95-a98a-a18ce659ebcd" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.023073 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.027745 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.027808 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.027955 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.028565 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.028608 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.033926 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8"] Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.139835 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.140090 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.140311 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkrjp\" (UniqueName: \"kubernetes.io/projected/70ea68cc-693f-4b1c-b563-b25ac7d5da93-kube-api-access-tkrjp\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.140399 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.140559 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.241984 4644 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.242084 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.242299 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.242596 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkrjp\" (UniqueName: \"kubernetes.io/projected/70ea68cc-693f-4b1c-b563-b25ac7d5da93-kube-api-access-tkrjp\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.242689 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.248641 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.248826 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.249057 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.249376 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.257893 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkrjp\" (UniqueName: \"kubernetes.io/projected/70ea68cc-693f-4b1c-b563-b25ac7d5da93-kube-api-access-tkrjp\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.340281 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:18 crc kubenswrapper[4644]: I1213 07:16:18.790055 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8"] Dec 13 07:16:19 crc kubenswrapper[4644]: I1213 07:16:19.090571 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" event={"ID":"70ea68cc-693f-4b1c-b563-b25ac7d5da93","Type":"ContainerStarted","Data":"2882fe5f71d5cea7a10eb90250e2564b3bdcc1b266c54a0e20b465aa16676385"} Dec 13 07:16:20 crc kubenswrapper[4644]: I1213 07:16:20.109546 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" event={"ID":"70ea68cc-693f-4b1c-b563-b25ac7d5da93","Type":"ContainerStarted","Data":"46013bad022a7530f38ccde2e93604a80a3bd6481625ef8e46206bf8464d82e4"} Dec 13 07:16:20 crc kubenswrapper[4644]: I1213 07:16:20.129253 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" podStartSLOduration=1.619711645 podStartE2EDuration="2.129217063s" podCreationTimestamp="2025-12-13 07:16:18 +0000 UTC" firstStartedPulling="2025-12-13 07:16:18.797826221 +0000 UTC m=+1841.012777053" lastFinishedPulling="2025-12-13 07:16:19.307331638 +0000 UTC m=+1841.522282471" observedRunningTime="2025-12-13 07:16:20.125968344 +0000 UTC m=+1842.340919178" watchObservedRunningTime="2025-12-13 07:16:20.129217063 +0000 UTC m=+1842.344167895" Dec 13 07:16:21 crc kubenswrapper[4644]: I1213 07:16:21.389483 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:16:21 crc kubenswrapper[4644]: E1213 07:16:21.390178 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:16:23 crc kubenswrapper[4644]: I1213 07:16:23.130546 4644 generic.go:334] "Generic (PLEG): container finished" podID="70ea68cc-693f-4b1c-b563-b25ac7d5da93" containerID="46013bad022a7530f38ccde2e93604a80a3bd6481625ef8e46206bf8464d82e4" exitCode=2 Dec 13 07:16:23 crc kubenswrapper[4644]: I1213 07:16:23.130650 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" 
event={"ID":"70ea68cc-693f-4b1c-b563-b25ac7d5da93","Type":"ContainerDied","Data":"46013bad022a7530f38ccde2e93604a80a3bd6481625ef8e46206bf8464d82e4"} Dec 13 07:16:24 crc kubenswrapper[4644]: I1213 07:16:24.484980 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:24 crc kubenswrapper[4644]: I1213 07:16:24.576131 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-repo-setup-combined-ca-bundle\") pod \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " Dec 13 07:16:24 crc kubenswrapper[4644]: I1213 07:16:24.576370 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-ssh-key\") pod \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " Dec 13 07:16:24 crc kubenswrapper[4644]: I1213 07:16:24.576533 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkrjp\" (UniqueName: \"kubernetes.io/projected/70ea68cc-693f-4b1c-b563-b25ac7d5da93-kube-api-access-tkrjp\") pod \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " Dec 13 07:16:24 crc kubenswrapper[4644]: I1213 07:16:24.576615 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-inventory\") pod \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " Dec 13 07:16:24 crc kubenswrapper[4644]: I1213 07:16:24.576649 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-ceph\") pod \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\" (UID: \"70ea68cc-693f-4b1c-b563-b25ac7d5da93\") " Dec 13 07:16:24 crc kubenswrapper[4644]: I1213 07:16:24.584384 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70ea68cc-693f-4b1c-b563-b25ac7d5da93-kube-api-access-tkrjp" (OuterVolumeSpecName: "kube-api-access-tkrjp") pod "70ea68cc-693f-4b1c-b563-b25ac7d5da93" (UID: "70ea68cc-693f-4b1c-b563-b25ac7d5da93"). InnerVolumeSpecName "kube-api-access-tkrjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:16:24 crc kubenswrapper[4644]: I1213 07:16:24.584428 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "70ea68cc-693f-4b1c-b563-b25ac7d5da93" (UID: "70ea68cc-693f-4b1c-b563-b25ac7d5da93"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:16:24 crc kubenswrapper[4644]: I1213 07:16:24.584786 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-ceph" (OuterVolumeSpecName: "ceph") pod "70ea68cc-693f-4b1c-b563-b25ac7d5da93" (UID: "70ea68cc-693f-4b1c-b563-b25ac7d5da93"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:16:24 crc kubenswrapper[4644]: I1213 07:16:24.604965 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-inventory" (OuterVolumeSpecName: "inventory") pod "70ea68cc-693f-4b1c-b563-b25ac7d5da93" (UID: "70ea68cc-693f-4b1c-b563-b25ac7d5da93"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:16:24 crc kubenswrapper[4644]: I1213 07:16:24.606311 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "70ea68cc-693f-4b1c-b563-b25ac7d5da93" (UID: "70ea68cc-693f-4b1c-b563-b25ac7d5da93"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:16:24 crc kubenswrapper[4644]: I1213 07:16:24.680505 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkrjp\" (UniqueName: \"kubernetes.io/projected/70ea68cc-693f-4b1c-b563-b25ac7d5da93-kube-api-access-tkrjp\") on node \"crc\" DevicePath \"\"" Dec 13 07:16:24 crc kubenswrapper[4644]: I1213 07:16:24.680811 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:16:24 crc kubenswrapper[4644]: I1213 07:16:24.680824 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:16:24 crc kubenswrapper[4644]: I1213 07:16:24.680834 4644 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:16:24 crc kubenswrapper[4644]: I1213 07:16:24.680846 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/70ea68cc-693f-4b1c-b563-b25ac7d5da93-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:16:25 crc kubenswrapper[4644]: I1213 07:16:25.148838 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" event={"ID":"70ea68cc-693f-4b1c-b563-b25ac7d5da93","Type":"ContainerDied","Data":"2882fe5f71d5cea7a10eb90250e2564b3bdcc1b266c54a0e20b465aa16676385"} Dec 13 07:16:25 crc kubenswrapper[4644]: I1213 07:16:25.148904 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2882fe5f71d5cea7a10eb90250e2564b3bdcc1b266c54a0e20b465aa16676385" Dec 13 07:16:25 crc kubenswrapper[4644]: I1213 07:16:25.148930 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8" Dec 13 07:16:36 crc kubenswrapper[4644]: I1213 07:16:36.389402 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:16:36 crc kubenswrapper[4644]: E1213 07:16:36.390094 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.023727 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2"] Dec 13 07:16:42 crc kubenswrapper[4644]: E1213 07:16:42.024603 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70ea68cc-693f-4b1c-b563-b25ac7d5da93" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.024621 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="70ea68cc-693f-4b1c-b563-b25ac7d5da93" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.024808 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="70ea68cc-693f-4b1c-b563-b25ac7d5da93" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.025481 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.027620 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.028057 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.028197 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.028430 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.028459 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.037243 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2"] Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.195490 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.195781 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.195854 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.196049 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.196127 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tm88b\" (UniqueName: \"kubernetes.io/projected/40249106-b634-45cd-b5fc-5648159b34c5-kube-api-access-tm88b\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.298113 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.298205 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tm88b\" (UniqueName: \"kubernetes.io/projected/40249106-b634-45cd-b5fc-5648159b34c5-kube-api-access-tm88b\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.298325 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.298576 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.298638 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" 
(UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.305921 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.306240 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.307566 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.307572 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.313709 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tm88b\" (UniqueName: \"kubernetes.io/projected/40249106-b634-45cd-b5fc-5648159b34c5-kube-api-access-tm88b\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.343876 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.615820 4644 scope.go:117] "RemoveContainer" containerID="54aa967c5a507027dd9d70a787419415934af769780674e2094fbd9d52a2c130" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.666617 4644 scope.go:117] "RemoveContainer" containerID="de5b80f5be9726757ca004c2540a881265471782a5b760644d1363cb15d0bdfd" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.728522 4644 scope.go:117] "RemoveContainer" containerID="e11dc725bccd6bd6471a672de3e45f229a9da8881f4974761bbd5ce158470e84" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.763757 4644 scope.go:117] "RemoveContainer" containerID="ebd2ac61733526bd5fd4823be477d0cb2a2ad20c05024f32ee7325ef587c24c8" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.804035 4644 scope.go:117] "RemoveContainer" containerID="d2e3dfc53e3a26f147eba9af7aeaa67176249426fdb0942a483d0d44443c4e13" Dec 13 07:16:42 crc kubenswrapper[4644]: I1213 07:16:42.825669 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2"] Dec 13 07:16:43 crc kubenswrapper[4644]: I1213 07:16:43.289122 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" event={"ID":"40249106-b634-45cd-b5fc-5648159b34c5","Type":"ContainerStarted","Data":"61dd41dba5d09ade420c6c14faf754fef4d4ee4755b6db8666ffa3c518093c83"} Dec 13 07:16:44 crc kubenswrapper[4644]: I1213 07:16:44.306863 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" event={"ID":"40249106-b634-45cd-b5fc-5648159b34c5","Type":"ContainerStarted","Data":"ec5406d77eb736cf39640f1386ad9bc9bddb4ea099d19a9f03e1beb89e80a675"} Dec 13 07:16:44 crc kubenswrapper[4644]: I1213 07:16:44.322836 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" podStartSLOduration=1.833355665 podStartE2EDuration="2.32281961s" podCreationTimestamp="2025-12-13 07:16:42 +0000 UTC" firstStartedPulling="2025-12-13 07:16:42.852768665 +0000 UTC m=+1865.067719498" lastFinishedPulling="2025-12-13 07:16:43.34223261 +0000 UTC m=+1865.557183443" observedRunningTime="2025-12-13 07:16:44.317142796 +0000 UTC m=+1866.532093630" watchObservedRunningTime="2025-12-13 07:16:44.32281961 +0000 UTC m=+1866.537770443" Dec 13 07:16:47 crc kubenswrapper[4644]: I1213 07:16:47.327727 4644 generic.go:334] "Generic (PLEG): container finished" podID="40249106-b634-45cd-b5fc-5648159b34c5" containerID="ec5406d77eb736cf39640f1386ad9bc9bddb4ea099d19a9f03e1beb89e80a675" exitCode=2 Dec 13 07:16:47 crc kubenswrapper[4644]: I1213 07:16:47.327801 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" event={"ID":"40249106-b634-45cd-b5fc-5648159b34c5","Type":"ContainerDied","Data":"ec5406d77eb736cf39640f1386ad9bc9bddb4ea099d19a9f03e1beb89e80a675"} Dec 13 07:16:48 crc kubenswrapper[4644]: I1213 07:16:48.657043 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:48 crc kubenswrapper[4644]: I1213 07:16:48.843198 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-ceph\") pod \"40249106-b634-45cd-b5fc-5648159b34c5\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " Dec 13 07:16:48 crc kubenswrapper[4644]: I1213 07:16:48.843614 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-repo-setup-combined-ca-bundle\") pod \"40249106-b634-45cd-b5fc-5648159b34c5\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " Dec 13 07:16:48 crc kubenswrapper[4644]: I1213 07:16:48.843717 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-ssh-key\") pod \"40249106-b634-45cd-b5fc-5648159b34c5\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " Dec 13 07:16:48 crc kubenswrapper[4644]: I1213 07:16:48.843875 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tm88b\" (UniqueName: \"kubernetes.io/projected/40249106-b634-45cd-b5fc-5648159b34c5-kube-api-access-tm88b\") pod \"40249106-b634-45cd-b5fc-5648159b34c5\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " Dec 13 07:16:48 crc kubenswrapper[4644]: I1213 07:16:48.843948 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-inventory\") pod \"40249106-b634-45cd-b5fc-5648159b34c5\" (UID: \"40249106-b634-45cd-b5fc-5648159b34c5\") " Dec 13 07:16:48 crc kubenswrapper[4644]: I1213 07:16:48.849798 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "40249106-b634-45cd-b5fc-5648159b34c5" (UID: "40249106-b634-45cd-b5fc-5648159b34c5"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:16:48 crc kubenswrapper[4644]: I1213 07:16:48.850020 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-ceph" (OuterVolumeSpecName: "ceph") pod "40249106-b634-45cd-b5fc-5648159b34c5" (UID: "40249106-b634-45cd-b5fc-5648159b34c5"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:16:48 crc kubenswrapper[4644]: I1213 07:16:48.850133 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40249106-b634-45cd-b5fc-5648159b34c5-kube-api-access-tm88b" (OuterVolumeSpecName: "kube-api-access-tm88b") pod "40249106-b634-45cd-b5fc-5648159b34c5" (UID: "40249106-b634-45cd-b5fc-5648159b34c5"). InnerVolumeSpecName "kube-api-access-tm88b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:16:48 crc kubenswrapper[4644]: I1213 07:16:48.866187 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "40249106-b634-45cd-b5fc-5648159b34c5" (UID: "40249106-b634-45cd-b5fc-5648159b34c5"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:16:48 crc kubenswrapper[4644]: I1213 07:16:48.867717 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-inventory" (OuterVolumeSpecName: "inventory") pod "40249106-b634-45cd-b5fc-5648159b34c5" (UID: "40249106-b634-45cd-b5fc-5648159b34c5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:16:48 crc kubenswrapper[4644]: I1213 07:16:48.946889 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:16:48 crc kubenswrapper[4644]: I1213 07:16:48.946923 4644 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:16:48 crc kubenswrapper[4644]: I1213 07:16:48.946937 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:16:48 crc kubenswrapper[4644]: I1213 07:16:48.946951 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tm88b\" (UniqueName: \"kubernetes.io/projected/40249106-b634-45cd-b5fc-5648159b34c5-kube-api-access-tm88b\") on node \"crc\" DevicePath \"\"" Dec 13 07:16:48 crc kubenswrapper[4644]: I1213 07:16:48.946962 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/40249106-b634-45cd-b5fc-5648159b34c5-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:16:49 crc kubenswrapper[4644]: I1213 07:16:49.348386 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" event={"ID":"40249106-b634-45cd-b5fc-5648159b34c5","Type":"ContainerDied","Data":"61dd41dba5d09ade420c6c14faf754fef4d4ee4755b6db8666ffa3c518093c83"} Dec 13 07:16:49 crc kubenswrapper[4644]: I1213 07:16:49.348480 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61dd41dba5d09ade420c6c14faf754fef4d4ee4755b6db8666ffa3c518093c83" Dec 13 07:16:49 crc kubenswrapper[4644]: I1213 07:16:49.348581 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2" Dec 13 07:16:51 crc kubenswrapper[4644]: I1213 07:16:51.389259 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:16:52 crc kubenswrapper[4644]: I1213 07:16:52.372903 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerStarted","Data":"6717e4b811a9dd73f331b0daf8793ce527b10ce24a5fb2d5fd04626c666ac9d3"} Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.030929 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x"] Dec 13 07:17:26 crc kubenswrapper[4644]: E1213 07:17:26.031978 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40249106-b634-45cd-b5fc-5648159b34c5" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.031994 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="40249106-b634-45cd-b5fc-5648159b34c5" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.032181 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="40249106-b634-45cd-b5fc-5648159b34c5" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.032898 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.034794 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.038341 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.038377 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.038689 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.038880 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.044700 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x"] Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.083966 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.084095 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-ceph\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.084126 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.084148 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.084182 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x94kr\" (UniqueName: \"kubernetes.io/projected/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-kube-api-access-x94kr\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.186207 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.186360 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.186412 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.186460 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.186516 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x94kr\" (UniqueName: \"kubernetes.io/projected/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-kube-api-access-x94kr\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.194264 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.194402 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.194404 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.194851 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.203584 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x94kr\" (UniqueName: \"kubernetes.io/projected/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-kube-api-access-x94kr\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.350706 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:26 crc kubenswrapper[4644]: I1213 07:17:26.786599 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x"] Dec 13 07:17:27 crc kubenswrapper[4644]: I1213 07:17:27.636719 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" event={"ID":"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b","Type":"ContainerStarted","Data":"ba003321e42ea24b213e21eb1778e879b25010f7d3901811c02b87ebd69b74ac"} Dec 13 07:17:28 crc kubenswrapper[4644]: I1213 07:17:28.648669 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" event={"ID":"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b","Type":"ContainerStarted","Data":"f2ac6cd4c11cee077939ef0584af27c939e1f6ed5e704a4c8d92e7489459debc"} Dec 13 07:17:28 crc kubenswrapper[4644]: I1213 07:17:28.666653 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" podStartSLOduration=1.803741442 podStartE2EDuration="2.666630557s" podCreationTimestamp="2025-12-13 07:17:26 +0000 UTC" firstStartedPulling="2025-12-13 07:17:26.792792707 +0000 UTC m=+1909.007743539" lastFinishedPulling="2025-12-13 07:17:27.65568182 +0000 UTC m=+1909.870632654" observedRunningTime="2025-12-13 07:17:28.664280288 +0000 UTC m=+1910.879231121" watchObservedRunningTime="2025-12-13 07:17:28.666630557 +0000 UTC m=+1910.881581390" Dec 13 07:17:37 crc kubenswrapper[4644]: I1213 07:17:37.726188 4644 generic.go:334] "Generic (PLEG): container finished" podID="4e75d1e6-dda9-48c7-b6ca-5105f4f6592b" containerID="f2ac6cd4c11cee077939ef0584af27c939e1f6ed5e704a4c8d92e7489459debc" exitCode=0 Dec 13 07:17:37 crc kubenswrapper[4644]: I1213 07:17:37.726286 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" event={"ID":"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b","Type":"ContainerDied","Data":"f2ac6cd4c11cee077939ef0584af27c939e1f6ed5e704a4c8d92e7489459debc"} Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.059834 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.159858 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-ssh-key\") pod \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.159916 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-inventory\") pod \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.161369 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-ceph\") pod \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.161408 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-repo-setup-combined-ca-bundle\") pod \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.161433 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x94kr\" (UniqueName: \"kubernetes.io/projected/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-kube-api-access-x94kr\") pod \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.165660 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-ceph" (OuterVolumeSpecName: "ceph") pod "4e75d1e6-dda9-48c7-b6ca-5105f4f6592b" (UID: "4e75d1e6-dda9-48c7-b6ca-5105f4f6592b"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.167460 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-kube-api-access-x94kr" (OuterVolumeSpecName: "kube-api-access-x94kr") pod "4e75d1e6-dda9-48c7-b6ca-5105f4f6592b" (UID: "4e75d1e6-dda9-48c7-b6ca-5105f4f6592b"). InnerVolumeSpecName "kube-api-access-x94kr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.169341 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "4e75d1e6-dda9-48c7-b6ca-5105f4f6592b" (UID: "4e75d1e6-dda9-48c7-b6ca-5105f4f6592b"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:17:39 crc kubenswrapper[4644]: E1213 07:17:39.183353 4644 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-ssh-key podName:4e75d1e6-dda9-48c7-b6ca-5105f4f6592b nodeName:}" failed. 
No retries permitted until 2025-12-13 07:17:39.683323786 +0000 UTC m=+1921.898274619 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ssh-key" (UniqueName: "kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-ssh-key") pod "4e75d1e6-dda9-48c7-b6ca-5105f4f6592b" (UID: "4e75d1e6-dda9-48c7-b6ca-5105f4f6592b") : error deleting /var/lib/kubelet/pods/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b/volume-subpaths: remove /var/lib/kubelet/pods/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b/volume-subpaths: no such file or directory Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.185712 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-inventory" (OuterVolumeSpecName: "inventory") pod "4e75d1e6-dda9-48c7-b6ca-5105f4f6592b" (UID: "4e75d1e6-dda9-48c7-b6ca-5105f4f6592b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.263566 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.263737 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.263809 4644 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.263871 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x94kr\" (UniqueName: \"kubernetes.io/projected/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-kube-api-access-x94kr\") on node \"crc\" DevicePath \"\"" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.741872 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" event={"ID":"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b","Type":"ContainerDied","Data":"ba003321e42ea24b213e21eb1778e879b25010f7d3901811c02b87ebd69b74ac"} Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.742212 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba003321e42ea24b213e21eb1778e879b25010f7d3901811c02b87ebd69b74ac" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.741943 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.773613 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-ssh-key\") pod \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\" (UID: \"4e75d1e6-dda9-48c7-b6ca-5105f4f6592b\") " Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.777297 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4e75d1e6-dda9-48c7-b6ca-5105f4f6592b" (UID: "4e75d1e6-dda9-48c7-b6ca-5105f4f6592b"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.819089 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2"] Dec 13 07:17:39 crc kubenswrapper[4644]: E1213 07:17:39.819474 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e75d1e6-dda9-48c7-b6ca-5105f4f6592b" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.819494 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e75d1e6-dda9-48c7-b6ca-5105f4f6592b" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.819672 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e75d1e6-dda9-48c7-b6ca-5105f4f6592b" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.820258 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.830601 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2"] Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.876817 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.876875 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvfrx\" (UniqueName: \"kubernetes.io/projected/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-kube-api-access-wvfrx\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.876912 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.877034 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.877116 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " 
pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.877666 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4e75d1e6-dda9-48c7-b6ca-5105f4f6592b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.980287 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.980359 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvfrx\" (UniqueName: \"kubernetes.io/projected/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-kube-api-access-wvfrx\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.980407 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.980472 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.980501 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.984770 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.985008 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.985021 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-ssh-key\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.985354 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:17:39 crc kubenswrapper[4644]: I1213 07:17:39.995740 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvfrx\" (UniqueName: \"kubernetes.io/projected/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-kube-api-access-wvfrx\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:17:40 crc kubenswrapper[4644]: I1213 07:17:40.139409 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:17:40 crc kubenswrapper[4644]: I1213 07:17:40.584209 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2"] Dec 13 07:17:40 crc kubenswrapper[4644]: I1213 07:17:40.751581 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" event={"ID":"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3","Type":"ContainerStarted","Data":"d96e5d9d01410cb550a445ebde5edb27f3635a4488d211f4eb10e1474abfdf62"} Dec 13 07:17:41 crc kubenswrapper[4644]: I1213 07:17:41.759840 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" event={"ID":"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3","Type":"ContainerStarted","Data":"458fd6e2b9fe9d800e7421b58edfe19afb757cead5271b7f250c5dcc9e976eca"} Dec 13 07:17:41 crc kubenswrapper[4644]: I1213 07:17:41.784948 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" podStartSLOduration=2.18156645 podStartE2EDuration="2.784927351s" podCreationTimestamp="2025-12-13 07:17:39 +0000 UTC" firstStartedPulling="2025-12-13 07:17:40.590878517 +0000 UTC m=+1922.805829351" lastFinishedPulling="2025-12-13 07:17:41.194239419 +0000 UTC m=+1923.409190252" observedRunningTime="2025-12-13 07:17:41.773888666 +0000 UTC m=+1923.988839499" watchObservedRunningTime="2025-12-13 07:17:41.784927351 +0000 UTC m=+1923.999878184" Dec 13 07:17:42 crc kubenswrapper[4644]: I1213 07:17:42.919203 4644 scope.go:117] "RemoveContainer" containerID="aeed9862032d73f7ea70ae609aa0ecb62c8f9b4e2dcf1c8e8f7e7b88d3e8aaed" Dec 13 07:17:42 crc kubenswrapper[4644]: I1213 07:17:42.969031 4644 scope.go:117] "RemoveContainer" containerID="6610b48084c0e3fa49e04d527357b0a08d8fb3977199d989b768d473c4e596eb" Dec 13 07:18:43 crc kubenswrapper[4644]: I1213 07:18:43.063097 4644 scope.go:117] "RemoveContainer" containerID="8a09bfe52832d6e83a1e04ebc7bab3b5b1eb564faadd8a242c8ba8c990ec05b8" Dec 13 07:18:43 crc kubenswrapper[4644]: I1213 07:18:43.104092 4644 scope.go:117] "RemoveContainer" containerID="5071c8bac49bc486d035a7c60dabb88d797ccf06913a6e4c3b95479214e8ecd1" Dec 13 07:18:43 crc kubenswrapper[4644]: I1213 
07:18:43.132249 4644 scope.go:117] "RemoveContainer" containerID="7acf449744166db33db46a3f468f94fc258d92029d295bfac1d828aa727716a6" Dec 13 07:19:03 crc kubenswrapper[4644]: I1213 07:19:03.432264 4644 generic.go:334] "Generic (PLEG): container finished" podID="bfce09ea-ec98-4fd2-a9ee-d9685f5215c3" containerID="458fd6e2b9fe9d800e7421b58edfe19afb757cead5271b7f250c5dcc9e976eca" exitCode=0 Dec 13 07:19:03 crc kubenswrapper[4644]: I1213 07:19:03.432362 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" event={"ID":"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3","Type":"ContainerDied","Data":"458fd6e2b9fe9d800e7421b58edfe19afb757cead5271b7f250c5dcc9e976eca"} Dec 13 07:19:04 crc kubenswrapper[4644]: I1213 07:19:04.773329 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:19:04 crc kubenswrapper[4644]: I1213 07:19:04.944217 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-ssh-key\") pod \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " Dec 13 07:19:04 crc kubenswrapper[4644]: I1213 07:19:04.944411 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-inventory\") pod \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " Dec 13 07:19:04 crc kubenswrapper[4644]: I1213 07:19:04.944464 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvfrx\" (UniqueName: \"kubernetes.io/projected/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-kube-api-access-wvfrx\") pod \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " Dec 13 07:19:04 crc kubenswrapper[4644]: I1213 07:19:04.944644 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-bootstrap-combined-ca-bundle\") pod \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " Dec 13 07:19:04 crc kubenswrapper[4644]: I1213 07:19:04.944688 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-ceph\") pod \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\" (UID: \"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3\") " Dec 13 07:19:04 crc kubenswrapper[4644]: I1213 07:19:04.950334 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-kube-api-access-wvfrx" (OuterVolumeSpecName: "kube-api-access-wvfrx") pod "bfce09ea-ec98-4fd2-a9ee-d9685f5215c3" (UID: "bfce09ea-ec98-4fd2-a9ee-d9685f5215c3"). InnerVolumeSpecName "kube-api-access-wvfrx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:19:04 crc kubenswrapper[4644]: I1213 07:19:04.950465 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "bfce09ea-ec98-4fd2-a9ee-d9685f5215c3" (UID: "bfce09ea-ec98-4fd2-a9ee-d9685f5215c3"). 
InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:19:04 crc kubenswrapper[4644]: I1213 07:19:04.950708 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-ceph" (OuterVolumeSpecName: "ceph") pod "bfce09ea-ec98-4fd2-a9ee-d9685f5215c3" (UID: "bfce09ea-ec98-4fd2-a9ee-d9685f5215c3"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:19:04 crc kubenswrapper[4644]: I1213 07:19:04.966727 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-inventory" (OuterVolumeSpecName: "inventory") pod "bfce09ea-ec98-4fd2-a9ee-d9685f5215c3" (UID: "bfce09ea-ec98-4fd2-a9ee-d9685f5215c3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:19:04 crc kubenswrapper[4644]: I1213 07:19:04.967241 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bfce09ea-ec98-4fd2-a9ee-d9685f5215c3" (UID: "bfce09ea-ec98-4fd2-a9ee-d9685f5215c3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.048216 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvfrx\" (UniqueName: \"kubernetes.io/projected/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-kube-api-access-wvfrx\") on node \"crc\" DevicePath \"\"" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.048247 4644 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.048257 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.048268 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.048277 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bfce09ea-ec98-4fd2-a9ee-d9685f5215c3-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.452186 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" event={"ID":"bfce09ea-ec98-4fd2-a9ee-d9685f5215c3","Type":"ContainerDied","Data":"d96e5d9d01410cb550a445ebde5edb27f3635a4488d211f4eb10e1474abfdf62"} Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.452569 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d96e5d9d01410cb550a445ebde5edb27f3635a4488d211f4eb10e1474abfdf62" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.452678 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.521596 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf"] Dec 13 07:19:05 crc kubenswrapper[4644]: E1213 07:19:05.522048 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfce09ea-ec98-4fd2-a9ee-d9685f5215c3" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.522070 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfce09ea-ec98-4fd2-a9ee-d9685f5215c3" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.522283 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfce09ea-ec98-4fd2-a9ee-d9685f5215c3" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.523008 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.524589 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.525951 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.526182 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.526353 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.527782 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.531235 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf"] Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.661273 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf\" (UID: \"66298b75-b28a-4f33-9507-9f8ec6bb4079\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.661332 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf\" (UID: \"66298b75-b28a-4f33-9507-9f8ec6bb4079\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.661670 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dr6j\" (UniqueName: \"kubernetes.io/projected/66298b75-b28a-4f33-9507-9f8ec6bb4079-kube-api-access-9dr6j\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf\" (UID: 
\"66298b75-b28a-4f33-9507-9f8ec6bb4079\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.661742 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf\" (UID: \"66298b75-b28a-4f33-9507-9f8ec6bb4079\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.764453 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dr6j\" (UniqueName: \"kubernetes.io/projected/66298b75-b28a-4f33-9507-9f8ec6bb4079-kube-api-access-9dr6j\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf\" (UID: \"66298b75-b28a-4f33-9507-9f8ec6bb4079\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.764524 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf\" (UID: \"66298b75-b28a-4f33-9507-9f8ec6bb4079\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.764712 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf\" (UID: \"66298b75-b28a-4f33-9507-9f8ec6bb4079\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.764753 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf\" (UID: \"66298b75-b28a-4f33-9507-9f8ec6bb4079\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.769330 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf\" (UID: \"66298b75-b28a-4f33-9507-9f8ec6bb4079\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.769625 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf\" (UID: \"66298b75-b28a-4f33-9507-9f8ec6bb4079\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.769911 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf\" (UID: \"66298b75-b28a-4f33-9507-9f8ec6bb4079\") " 
pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.780635 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dr6j\" (UniqueName: \"kubernetes.io/projected/66298b75-b28a-4f33-9507-9f8ec6bb4079-kube-api-access-9dr6j\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf\" (UID: \"66298b75-b28a-4f33-9507-9f8ec6bb4079\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" Dec 13 07:19:05 crc kubenswrapper[4644]: I1213 07:19:05.839525 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" Dec 13 07:19:06 crc kubenswrapper[4644]: I1213 07:19:06.297890 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf"] Dec 13 07:19:06 crc kubenswrapper[4644]: I1213 07:19:06.460863 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" event={"ID":"66298b75-b28a-4f33-9507-9f8ec6bb4079","Type":"ContainerStarted","Data":"956a40f9f44db276354614cd0c3be055ac9f16cf0742480a83b7aae464d92460"} Dec 13 07:19:08 crc kubenswrapper[4644]: I1213 07:19:08.476342 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" event={"ID":"66298b75-b28a-4f33-9507-9f8ec6bb4079","Type":"ContainerStarted","Data":"7e0b36a3538bcd10dd6dd607bf492378b5baf4bbbdbfd3d9d4cfc71e382d106e"} Dec 13 07:19:08 crc kubenswrapper[4644]: I1213 07:19:08.494663 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" podStartSLOduration=2.434563228 podStartE2EDuration="3.494644617s" podCreationTimestamp="2025-12-13 07:19:05 +0000 UTC" firstStartedPulling="2025-12-13 07:19:06.304957234 +0000 UTC m=+2008.519908067" lastFinishedPulling="2025-12-13 07:19:07.365038623 +0000 UTC m=+2009.579989456" observedRunningTime="2025-12-13 07:19:08.488223995 +0000 UTC m=+2010.703174828" watchObservedRunningTime="2025-12-13 07:19:08.494644617 +0000 UTC m=+2010.709595450" Dec 13 07:19:09 crc kubenswrapper[4644]: I1213 07:19:09.753954 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:19:09 crc kubenswrapper[4644]: I1213 07:19:09.754248 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:19:26 crc kubenswrapper[4644]: I1213 07:19:26.617845 4644 generic.go:334] "Generic (PLEG): container finished" podID="66298b75-b28a-4f33-9507-9f8ec6bb4079" containerID="7e0b36a3538bcd10dd6dd607bf492378b5baf4bbbdbfd3d9d4cfc71e382d106e" exitCode=0 Dec 13 07:19:26 crc kubenswrapper[4644]: I1213 07:19:26.617918 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" 
event={"ID":"66298b75-b28a-4f33-9507-9f8ec6bb4079","Type":"ContainerDied","Data":"7e0b36a3538bcd10dd6dd607bf492378b5baf4bbbdbfd3d9d4cfc71e382d106e"} Dec 13 07:19:27 crc kubenswrapper[4644]: I1213 07:19:27.957066 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.031514 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-ssh-key\") pod \"66298b75-b28a-4f33-9507-9f8ec6bb4079\" (UID: \"66298b75-b28a-4f33-9507-9f8ec6bb4079\") " Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.031576 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-ceph\") pod \"66298b75-b28a-4f33-9507-9f8ec6bb4079\" (UID: \"66298b75-b28a-4f33-9507-9f8ec6bb4079\") " Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.031761 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-inventory\") pod \"66298b75-b28a-4f33-9507-9f8ec6bb4079\" (UID: \"66298b75-b28a-4f33-9507-9f8ec6bb4079\") " Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.031913 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dr6j\" (UniqueName: \"kubernetes.io/projected/66298b75-b28a-4f33-9507-9f8ec6bb4079-kube-api-access-9dr6j\") pod \"66298b75-b28a-4f33-9507-9f8ec6bb4079\" (UID: \"66298b75-b28a-4f33-9507-9f8ec6bb4079\") " Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.037305 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66298b75-b28a-4f33-9507-9f8ec6bb4079-kube-api-access-9dr6j" (OuterVolumeSpecName: "kube-api-access-9dr6j") pod "66298b75-b28a-4f33-9507-9f8ec6bb4079" (UID: "66298b75-b28a-4f33-9507-9f8ec6bb4079"). InnerVolumeSpecName "kube-api-access-9dr6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.037316 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-ceph" (OuterVolumeSpecName: "ceph") pod "66298b75-b28a-4f33-9507-9f8ec6bb4079" (UID: "66298b75-b28a-4f33-9507-9f8ec6bb4079"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.054554 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-inventory" (OuterVolumeSpecName: "inventory") pod "66298b75-b28a-4f33-9507-9f8ec6bb4079" (UID: "66298b75-b28a-4f33-9507-9f8ec6bb4079"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.055018 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "66298b75-b28a-4f33-9507-9f8ec6bb4079" (UID: "66298b75-b28a-4f33-9507-9f8ec6bb4079"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.135939 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dr6j\" (UniqueName: \"kubernetes.io/projected/66298b75-b28a-4f33-9507-9f8ec6bb4079-kube-api-access-9dr6j\") on node \"crc\" DevicePath \"\"" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.135992 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.136005 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.136017 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66298b75-b28a-4f33-9507-9f8ec6bb4079-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.640398 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" event={"ID":"66298b75-b28a-4f33-9507-9f8ec6bb4079","Type":"ContainerDied","Data":"956a40f9f44db276354614cd0c3be055ac9f16cf0742480a83b7aae464d92460"} Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.640495 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="956a40f9f44db276354614cd0c3be055ac9f16cf0742480a83b7aae464d92460" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.640506 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.700034 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz"] Dec 13 07:19:28 crc kubenswrapper[4644]: E1213 07:19:28.700403 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66298b75-b28a-4f33-9507-9f8ec6bb4079" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.700429 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="66298b75-b28a-4f33-9507-9f8ec6bb4079" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.700643 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="66298b75-b28a-4f33-9507-9f8ec6bb4079" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.701184 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.702879 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.703495 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.703675 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.704390 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.709259 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.711155 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz"] Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.747569 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz\" (UID: \"57f610f9-fbe1-408a-b364-c395f20690dd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.747671 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz\" (UID: \"57f610f9-fbe1-408a-b364-c395f20690dd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.747728 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz\" (UID: \"57f610f9-fbe1-408a-b364-c395f20690dd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.748044 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdtbq\" (UniqueName: \"kubernetes.io/projected/57f610f9-fbe1-408a-b364-c395f20690dd-kube-api-access-bdtbq\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz\" (UID: \"57f610f9-fbe1-408a-b364-c395f20690dd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.851224 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdtbq\" (UniqueName: \"kubernetes.io/projected/57f610f9-fbe1-408a-b364-c395f20690dd-kube-api-access-bdtbq\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz\" (UID: \"57f610f9-fbe1-408a-b364-c395f20690dd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.851360 4644 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz\" (UID: \"57f610f9-fbe1-408a-b364-c395f20690dd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.851424 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz\" (UID: \"57f610f9-fbe1-408a-b364-c395f20690dd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.851491 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz\" (UID: \"57f610f9-fbe1-408a-b364-c395f20690dd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.857811 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz\" (UID: \"57f610f9-fbe1-408a-b364-c395f20690dd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.857986 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz\" (UID: \"57f610f9-fbe1-408a-b364-c395f20690dd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.858275 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz\" (UID: \"57f610f9-fbe1-408a-b364-c395f20690dd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" Dec 13 07:19:28 crc kubenswrapper[4644]: I1213 07:19:28.866626 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdtbq\" (UniqueName: \"kubernetes.io/projected/57f610f9-fbe1-408a-b364-c395f20690dd-kube-api-access-bdtbq\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz\" (UID: \"57f610f9-fbe1-408a-b364-c395f20690dd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" Dec 13 07:19:29 crc kubenswrapper[4644]: I1213 07:19:29.019061 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" Dec 13 07:19:29 crc kubenswrapper[4644]: I1213 07:19:29.484675 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz"] Dec 13 07:19:29 crc kubenswrapper[4644]: I1213 07:19:29.650856 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" event={"ID":"57f610f9-fbe1-408a-b364-c395f20690dd","Type":"ContainerStarted","Data":"3abbc6d236e329727c90e601810db0171b0f7688e9967ce7be1013ec566f6df1"} Dec 13 07:19:30 crc kubenswrapper[4644]: I1213 07:19:30.661811 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" event={"ID":"57f610f9-fbe1-408a-b364-c395f20690dd","Type":"ContainerStarted","Data":"6aadd7ffe34806b7d43db43fce3942c42c2832e8c2e834ba245d78782c0f71d5"} Dec 13 07:19:30 crc kubenswrapper[4644]: I1213 07:19:30.683482 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" podStartSLOduration=2.117325567 podStartE2EDuration="2.683465382s" podCreationTimestamp="2025-12-13 07:19:28 +0000 UTC" firstStartedPulling="2025-12-13 07:19:29.491511006 +0000 UTC m=+2031.706461840" lastFinishedPulling="2025-12-13 07:19:30.057650821 +0000 UTC m=+2032.272601655" observedRunningTime="2025-12-13 07:19:30.678669724 +0000 UTC m=+2032.893620558" watchObservedRunningTime="2025-12-13 07:19:30.683465382 +0000 UTC m=+2032.898416215" Dec 13 07:19:34 crc kubenswrapper[4644]: I1213 07:19:34.709078 4644 generic.go:334] "Generic (PLEG): container finished" podID="57f610f9-fbe1-408a-b364-c395f20690dd" containerID="6aadd7ffe34806b7d43db43fce3942c42c2832e8c2e834ba245d78782c0f71d5" exitCode=0 Dec 13 07:19:34 crc kubenswrapper[4644]: I1213 07:19:34.709184 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" event={"ID":"57f610f9-fbe1-408a-b364-c395f20690dd","Type":"ContainerDied","Data":"6aadd7ffe34806b7d43db43fce3942c42c2832e8c2e834ba245d78782c0f71d5"} Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.042941 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.108998 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-inventory\") pod \"57f610f9-fbe1-408a-b364-c395f20690dd\" (UID: \"57f610f9-fbe1-408a-b364-c395f20690dd\") " Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.109074 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-ssh-key\") pod \"57f610f9-fbe1-408a-b364-c395f20690dd\" (UID: \"57f610f9-fbe1-408a-b364-c395f20690dd\") " Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.109125 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdtbq\" (UniqueName: \"kubernetes.io/projected/57f610f9-fbe1-408a-b364-c395f20690dd-kube-api-access-bdtbq\") pod \"57f610f9-fbe1-408a-b364-c395f20690dd\" (UID: \"57f610f9-fbe1-408a-b364-c395f20690dd\") " Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.109178 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-ceph\") pod \"57f610f9-fbe1-408a-b364-c395f20690dd\" (UID: \"57f610f9-fbe1-408a-b364-c395f20690dd\") " Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.116149 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-ceph" (OuterVolumeSpecName: "ceph") pod "57f610f9-fbe1-408a-b364-c395f20690dd" (UID: "57f610f9-fbe1-408a-b364-c395f20690dd"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.116759 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57f610f9-fbe1-408a-b364-c395f20690dd-kube-api-access-bdtbq" (OuterVolumeSpecName: "kube-api-access-bdtbq") pod "57f610f9-fbe1-408a-b364-c395f20690dd" (UID: "57f610f9-fbe1-408a-b364-c395f20690dd"). InnerVolumeSpecName "kube-api-access-bdtbq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.135004 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-inventory" (OuterVolumeSpecName: "inventory") pod "57f610f9-fbe1-408a-b364-c395f20690dd" (UID: "57f610f9-fbe1-408a-b364-c395f20690dd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.135303 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "57f610f9-fbe1-408a-b364-c395f20690dd" (UID: "57f610f9-fbe1-408a-b364-c395f20690dd"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.211964 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.212002 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdtbq\" (UniqueName: \"kubernetes.io/projected/57f610f9-fbe1-408a-b364-c395f20690dd-kube-api-access-bdtbq\") on node \"crc\" DevicePath \"\"" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.212019 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.212031 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/57f610f9-fbe1-408a-b364-c395f20690dd-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.736459 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" event={"ID":"57f610f9-fbe1-408a-b364-c395f20690dd","Type":"ContainerDied","Data":"3abbc6d236e329727c90e601810db0171b0f7688e9967ce7be1013ec566f6df1"} Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.736505 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3abbc6d236e329727c90e601810db0171b0f7688e9967ce7be1013ec566f6df1" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.736522 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.788148 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx"] Dec 13 07:19:36 crc kubenswrapper[4644]: E1213 07:19:36.788596 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57f610f9-fbe1-408a-b364-c395f20690dd" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.788619 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="57f610f9-fbe1-408a-b364-c395f20690dd" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.788865 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="57f610f9-fbe1-408a-b364-c395f20690dd" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.789537 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.791946 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.792054 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.792201 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.792251 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.792655 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.797532 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx"] Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.926606 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9zlvx\" (UID: \"1cd50117-b865-4351-a364-a283893634ce\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.926661 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lshj\" (UniqueName: \"kubernetes.io/projected/1cd50117-b865-4351-a364-a283893634ce-kube-api-access-9lshj\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9zlvx\" (UID: \"1cd50117-b865-4351-a364-a283893634ce\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.926738 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9zlvx\" (UID: \"1cd50117-b865-4351-a364-a283893634ce\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" Dec 13 07:19:36 crc kubenswrapper[4644]: I1213 07:19:36.926801 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9zlvx\" (UID: \"1cd50117-b865-4351-a364-a283893634ce\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" Dec 13 07:19:37 crc kubenswrapper[4644]: I1213 07:19:37.029009 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9zlvx\" (UID: \"1cd50117-b865-4351-a364-a283893634ce\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" Dec 13 07:19:37 crc kubenswrapper[4644]: I1213 07:19:37.029057 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lshj\" (UniqueName: 
\"kubernetes.io/projected/1cd50117-b865-4351-a364-a283893634ce-kube-api-access-9lshj\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9zlvx\" (UID: \"1cd50117-b865-4351-a364-a283893634ce\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" Dec 13 07:19:37 crc kubenswrapper[4644]: I1213 07:19:37.029106 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9zlvx\" (UID: \"1cd50117-b865-4351-a364-a283893634ce\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" Dec 13 07:19:37 crc kubenswrapper[4644]: I1213 07:19:37.029146 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9zlvx\" (UID: \"1cd50117-b865-4351-a364-a283893634ce\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" Dec 13 07:19:37 crc kubenswrapper[4644]: I1213 07:19:37.034153 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9zlvx\" (UID: \"1cd50117-b865-4351-a364-a283893634ce\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" Dec 13 07:19:37 crc kubenswrapper[4644]: I1213 07:19:37.034532 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9zlvx\" (UID: \"1cd50117-b865-4351-a364-a283893634ce\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" Dec 13 07:19:37 crc kubenswrapper[4644]: I1213 07:19:37.034908 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9zlvx\" (UID: \"1cd50117-b865-4351-a364-a283893634ce\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" Dec 13 07:19:37 crc kubenswrapper[4644]: I1213 07:19:37.043726 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lshj\" (UniqueName: \"kubernetes.io/projected/1cd50117-b865-4351-a364-a283893634ce-kube-api-access-9lshj\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9zlvx\" (UID: \"1cd50117-b865-4351-a364-a283893634ce\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" Dec 13 07:19:37 crc kubenswrapper[4644]: I1213 07:19:37.103545 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" Dec 13 07:19:37 crc kubenswrapper[4644]: I1213 07:19:37.571936 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx"] Dec 13 07:19:37 crc kubenswrapper[4644]: I1213 07:19:37.744181 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" event={"ID":"1cd50117-b865-4351-a364-a283893634ce","Type":"ContainerStarted","Data":"81baa4a47250b5aaecad6b07df5e3178f209e5c683a11f7c62ebbd11f4fc0a63"} Dec 13 07:19:38 crc kubenswrapper[4644]: I1213 07:19:38.752613 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" event={"ID":"1cd50117-b865-4351-a364-a283893634ce","Type":"ContainerStarted","Data":"a7f8ba4163eebc5cc8087055b6a8196914a75628ac5c68f4f84d63ba0feb5803"} Dec 13 07:19:38 crc kubenswrapper[4644]: I1213 07:19:38.770277 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" podStartSLOduration=2.27959265 podStartE2EDuration="2.770259846s" podCreationTimestamp="2025-12-13 07:19:36 +0000 UTC" firstStartedPulling="2025-12-13 07:19:37.57753446 +0000 UTC m=+2039.792485294" lastFinishedPulling="2025-12-13 07:19:38.068201657 +0000 UTC m=+2040.283152490" observedRunningTime="2025-12-13 07:19:38.7669486 +0000 UTC m=+2040.981899433" watchObservedRunningTime="2025-12-13 07:19:38.770259846 +0000 UTC m=+2040.985210678" Dec 13 07:19:39 crc kubenswrapper[4644]: I1213 07:19:39.754238 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:19:39 crc kubenswrapper[4644]: I1213 07:19:39.754596 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:20:04 crc kubenswrapper[4644]: I1213 07:20:04.984299 4644 generic.go:334] "Generic (PLEG): container finished" podID="1cd50117-b865-4351-a364-a283893634ce" containerID="a7f8ba4163eebc5cc8087055b6a8196914a75628ac5c68f4f84d63ba0feb5803" exitCode=0 Dec 13 07:20:04 crc kubenswrapper[4644]: I1213 07:20:04.984392 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" event={"ID":"1cd50117-b865-4351-a364-a283893634ce","Type":"ContainerDied","Data":"a7f8ba4163eebc5cc8087055b6a8196914a75628ac5c68f4f84d63ba0feb5803"} Dec 13 07:20:06 crc kubenswrapper[4644]: I1213 07:20:06.317585 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" Dec 13 07:20:06 crc kubenswrapper[4644]: I1213 07:20:06.512150 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lshj\" (UniqueName: \"kubernetes.io/projected/1cd50117-b865-4351-a364-a283893634ce-kube-api-access-9lshj\") pod \"1cd50117-b865-4351-a364-a283893634ce\" (UID: \"1cd50117-b865-4351-a364-a283893634ce\") " Dec 13 07:20:06 crc kubenswrapper[4644]: I1213 07:20:06.512436 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-inventory\") pod \"1cd50117-b865-4351-a364-a283893634ce\" (UID: \"1cd50117-b865-4351-a364-a283893634ce\") " Dec 13 07:20:06 crc kubenswrapper[4644]: I1213 07:20:06.512578 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-ssh-key\") pod \"1cd50117-b865-4351-a364-a283893634ce\" (UID: \"1cd50117-b865-4351-a364-a283893634ce\") " Dec 13 07:20:06 crc kubenswrapper[4644]: I1213 07:20:06.512613 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-ceph\") pod \"1cd50117-b865-4351-a364-a283893634ce\" (UID: \"1cd50117-b865-4351-a364-a283893634ce\") " Dec 13 07:20:06 crc kubenswrapper[4644]: I1213 07:20:06.519908 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-ceph" (OuterVolumeSpecName: "ceph") pod "1cd50117-b865-4351-a364-a283893634ce" (UID: "1cd50117-b865-4351-a364-a283893634ce"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:20:06 crc kubenswrapper[4644]: I1213 07:20:06.520412 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cd50117-b865-4351-a364-a283893634ce-kube-api-access-9lshj" (OuterVolumeSpecName: "kube-api-access-9lshj") pod "1cd50117-b865-4351-a364-a283893634ce" (UID: "1cd50117-b865-4351-a364-a283893634ce"). InnerVolumeSpecName "kube-api-access-9lshj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:20:06 crc kubenswrapper[4644]: I1213 07:20:06.538640 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1cd50117-b865-4351-a364-a283893634ce" (UID: "1cd50117-b865-4351-a364-a283893634ce"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:20:06 crc kubenswrapper[4644]: I1213 07:20:06.540815 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-inventory" (OuterVolumeSpecName: "inventory") pod "1cd50117-b865-4351-a364-a283893634ce" (UID: "1cd50117-b865-4351-a364-a283893634ce"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:20:06 crc kubenswrapper[4644]: I1213 07:20:06.616545 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lshj\" (UniqueName: \"kubernetes.io/projected/1cd50117-b865-4351-a364-a283893634ce-kube-api-access-9lshj\") on node \"crc\" DevicePath \"\"" Dec 13 07:20:06 crc kubenswrapper[4644]: I1213 07:20:06.616585 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:20:06 crc kubenswrapper[4644]: I1213 07:20:06.616596 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:20:06 crc kubenswrapper[4644]: I1213 07:20:06.616608 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1cd50117-b865-4351-a364-a283893634ce-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.022177 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" event={"ID":"1cd50117-b865-4351-a364-a283893634ce","Type":"ContainerDied","Data":"81baa4a47250b5aaecad6b07df5e3178f209e5c683a11f7c62ebbd11f4fc0a63"} Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.022503 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81baa4a47250b5aaecad6b07df5e3178f209e5c683a11f7c62ebbd11f4fc0a63" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.022589 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9zlvx" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.080561 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb"] Dec 13 07:20:07 crc kubenswrapper[4644]: E1213 07:20:07.081025 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cd50117-b865-4351-a364-a283893634ce" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.081046 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cd50117-b865-4351-a364-a283893634ce" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.081289 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cd50117-b865-4351-a364-a283893634ce" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.082000 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.084636 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.085057 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.085217 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.085436 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.085595 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.087054 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb"] Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.129597 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb\" (UID: \"f90586e1-30df-422c-b8c7-fdd4fac3112b\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.129751 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb\" (UID: \"f90586e1-30df-422c-b8c7-fdd4fac3112b\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.129843 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnpq9\" (UniqueName: \"kubernetes.io/projected/f90586e1-30df-422c-b8c7-fdd4fac3112b-kube-api-access-gnpq9\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb\" (UID: \"f90586e1-30df-422c-b8c7-fdd4fac3112b\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.129949 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb\" (UID: \"f90586e1-30df-422c-b8c7-fdd4fac3112b\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.231876 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb\" (UID: \"f90586e1-30df-422c-b8c7-fdd4fac3112b\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.232014 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" 
(UniqueName: \"kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb\" (UID: \"f90586e1-30df-422c-b8c7-fdd4fac3112b\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.232061 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnpq9\" (UniqueName: \"kubernetes.io/projected/f90586e1-30df-422c-b8c7-fdd4fac3112b-kube-api-access-gnpq9\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb\" (UID: \"f90586e1-30df-422c-b8c7-fdd4fac3112b\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.232191 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb\" (UID: \"f90586e1-30df-422c-b8c7-fdd4fac3112b\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.237221 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb\" (UID: \"f90586e1-30df-422c-b8c7-fdd4fac3112b\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.237296 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb\" (UID: \"f90586e1-30df-422c-b8c7-fdd4fac3112b\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.237780 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb\" (UID: \"f90586e1-30df-422c-b8c7-fdd4fac3112b\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.246307 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnpq9\" (UniqueName: \"kubernetes.io/projected/f90586e1-30df-422c-b8c7-fdd4fac3112b-kube-api-access-gnpq9\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb\" (UID: \"f90586e1-30df-422c-b8c7-fdd4fac3112b\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.397955 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" Dec 13 07:20:07 crc kubenswrapper[4644]: I1213 07:20:07.949804 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb"] Dec 13 07:20:08 crc kubenswrapper[4644]: I1213 07:20:08.029869 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" event={"ID":"f90586e1-30df-422c-b8c7-fdd4fac3112b","Type":"ContainerStarted","Data":"d8e3740f84fcd54a9bb30979a535c3d73e485acd27248f7076dc7ddb6280df53"} Dec 13 07:20:09 crc kubenswrapper[4644]: I1213 07:20:09.042487 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" event={"ID":"f90586e1-30df-422c-b8c7-fdd4fac3112b","Type":"ContainerStarted","Data":"d8ebcd7f822cf7819e9fb06bb1c6555be31d5b9123b6946929e90fa437ace897"} Dec 13 07:20:09 crc kubenswrapper[4644]: I1213 07:20:09.064768 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" podStartSLOduration=1.335779182 podStartE2EDuration="2.064749633s" podCreationTimestamp="2025-12-13 07:20:07 +0000 UTC" firstStartedPulling="2025-12-13 07:20:07.957573755 +0000 UTC m=+2070.172524588" lastFinishedPulling="2025-12-13 07:20:08.686544206 +0000 UTC m=+2070.901495039" observedRunningTime="2025-12-13 07:20:09.059309895 +0000 UTC m=+2071.274260728" watchObservedRunningTime="2025-12-13 07:20:09.064749633 +0000 UTC m=+2071.279700467" Dec 13 07:20:09 crc kubenswrapper[4644]: I1213 07:20:09.753375 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:20:09 crc kubenswrapper[4644]: I1213 07:20:09.753478 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:20:09 crc kubenswrapper[4644]: I1213 07:20:09.753538 4644 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 07:20:09 crc kubenswrapper[4644]: I1213 07:20:09.754524 4644 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6717e4b811a9dd73f331b0daf8793ce527b10ce24a5fb2d5fd04626c666ac9d3"} pod="openshift-machine-config-operator/machine-config-daemon-45tj4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 07:20:09 crc kubenswrapper[4644]: I1213 07:20:09.754594 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" containerID="cri-o://6717e4b811a9dd73f331b0daf8793ce527b10ce24a5fb2d5fd04626c666ac9d3" gracePeriod=600 Dec 13 07:20:10 crc kubenswrapper[4644]: I1213 07:20:10.054356 4644 generic.go:334] "Generic (PLEG): container finished" 
podID="48240f19-087e-4597-b448-ab1a190a5027" containerID="6717e4b811a9dd73f331b0daf8793ce527b10ce24a5fb2d5fd04626c666ac9d3" exitCode=0 Dec 13 07:20:10 crc kubenswrapper[4644]: I1213 07:20:10.054414 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerDied","Data":"6717e4b811a9dd73f331b0daf8793ce527b10ce24a5fb2d5fd04626c666ac9d3"} Dec 13 07:20:10 crc kubenswrapper[4644]: I1213 07:20:10.054805 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerStarted","Data":"385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec"} Dec 13 07:20:10 crc kubenswrapper[4644]: I1213 07:20:10.054835 4644 scope.go:117] "RemoveContainer" containerID="5672b909efe43612edcd71965adb565a8bdfabfb0db7a93589c36a33ab755130" Dec 13 07:20:12 crc kubenswrapper[4644]: I1213 07:20:12.076525 4644 generic.go:334] "Generic (PLEG): container finished" podID="f90586e1-30df-422c-b8c7-fdd4fac3112b" containerID="d8ebcd7f822cf7819e9fb06bb1c6555be31d5b9123b6946929e90fa437ace897" exitCode=0 Dec 13 07:20:12 crc kubenswrapper[4644]: I1213 07:20:12.076601 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" event={"ID":"f90586e1-30df-422c-b8c7-fdd4fac3112b","Type":"ContainerDied","Data":"d8ebcd7f822cf7819e9fb06bb1c6555be31d5b9123b6946929e90fa437ace897"} Dec 13 07:20:13 crc kubenswrapper[4644]: I1213 07:20:13.372674 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" Dec 13 07:20:13 crc kubenswrapper[4644]: I1213 07:20:13.469585 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-ssh-key\") pod \"f90586e1-30df-422c-b8c7-fdd4fac3112b\" (UID: \"f90586e1-30df-422c-b8c7-fdd4fac3112b\") " Dec 13 07:20:13 crc kubenswrapper[4644]: I1213 07:20:13.469635 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-ceph\") pod \"f90586e1-30df-422c-b8c7-fdd4fac3112b\" (UID: \"f90586e1-30df-422c-b8c7-fdd4fac3112b\") " Dec 13 07:20:13 crc kubenswrapper[4644]: I1213 07:20:13.469698 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnpq9\" (UniqueName: \"kubernetes.io/projected/f90586e1-30df-422c-b8c7-fdd4fac3112b-kube-api-access-gnpq9\") pod \"f90586e1-30df-422c-b8c7-fdd4fac3112b\" (UID: \"f90586e1-30df-422c-b8c7-fdd4fac3112b\") " Dec 13 07:20:13 crc kubenswrapper[4644]: I1213 07:20:13.469799 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-inventory\") pod \"f90586e1-30df-422c-b8c7-fdd4fac3112b\" (UID: \"f90586e1-30df-422c-b8c7-fdd4fac3112b\") " Dec 13 07:20:13 crc kubenswrapper[4644]: I1213 07:20:13.475746 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-ceph" (OuterVolumeSpecName: "ceph") pod "f90586e1-30df-422c-b8c7-fdd4fac3112b" (UID: "f90586e1-30df-422c-b8c7-fdd4fac3112b"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:20:13 crc kubenswrapper[4644]: I1213 07:20:13.475829 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f90586e1-30df-422c-b8c7-fdd4fac3112b-kube-api-access-gnpq9" (OuterVolumeSpecName: "kube-api-access-gnpq9") pod "f90586e1-30df-422c-b8c7-fdd4fac3112b" (UID: "f90586e1-30df-422c-b8c7-fdd4fac3112b"). InnerVolumeSpecName "kube-api-access-gnpq9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:20:13 crc kubenswrapper[4644]: I1213 07:20:13.491745 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f90586e1-30df-422c-b8c7-fdd4fac3112b" (UID: "f90586e1-30df-422c-b8c7-fdd4fac3112b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:20:13 crc kubenswrapper[4644]: I1213 07:20:13.493111 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-inventory" (OuterVolumeSpecName: "inventory") pod "f90586e1-30df-422c-b8c7-fdd4fac3112b" (UID: "f90586e1-30df-422c-b8c7-fdd4fac3112b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:20:13 crc kubenswrapper[4644]: I1213 07:20:13.572252 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:20:13 crc kubenswrapper[4644]: I1213 07:20:13.572285 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:20:13 crc kubenswrapper[4644]: I1213 07:20:13.572296 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnpq9\" (UniqueName: \"kubernetes.io/projected/f90586e1-30df-422c-b8c7-fdd4fac3112b-kube-api-access-gnpq9\") on node \"crc\" DevicePath \"\"" Dec 13 07:20:13 crc kubenswrapper[4644]: I1213 07:20:13.572309 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f90586e1-30df-422c-b8c7-fdd4fac3112b-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.097679 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" event={"ID":"f90586e1-30df-422c-b8c7-fdd4fac3112b","Type":"ContainerDied","Data":"d8e3740f84fcd54a9bb30979a535c3d73e485acd27248f7076dc7ddb6280df53"} Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.097962 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d8e3740f84fcd54a9bb30979a535c3d73e485acd27248f7076dc7ddb6280df53" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.097757 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.159005 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p"] Dec 13 07:20:14 crc kubenswrapper[4644]: E1213 07:20:14.159392 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f90586e1-30df-422c-b8c7-fdd4fac3112b" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.159415 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f90586e1-30df-422c-b8c7-fdd4fac3112b" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.159598 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f90586e1-30df-422c-b8c7-fdd4fac3112b" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.160137 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.165016 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.165180 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.165319 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.165834 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.165956 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.174697 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p"] Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.184392 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p\" (UID: \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.184487 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p\" (UID: \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.184523 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzvq7\" (UniqueName: \"kubernetes.io/projected/61d39411-4b48-4ea1-b9f9-aa161d05ca46-kube-api-access-zzvq7\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p\" (UID: \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\") " 
pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.184554 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p\" (UID: \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.285475 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p\" (UID: \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.285603 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p\" (UID: \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.285643 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzvq7\" (UniqueName: \"kubernetes.io/projected/61d39411-4b48-4ea1-b9f9-aa161d05ca46-kube-api-access-zzvq7\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p\" (UID: \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.285686 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p\" (UID: \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.291069 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p\" (UID: \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.291960 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p\" (UID: \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.292036 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p\" (UID: \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.302484 4644 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzvq7\" (UniqueName: \"kubernetes.io/projected/61d39411-4b48-4ea1-b9f9-aa161d05ca46-kube-api-access-zzvq7\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p\" (UID: \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.494120 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" Dec 13 07:20:14 crc kubenswrapper[4644]: I1213 07:20:14.956897 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p"] Dec 13 07:20:14 crc kubenswrapper[4644]: W1213 07:20:14.960944 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61d39411_4b48_4ea1_b9f9_aa161d05ca46.slice/crio-779fd2b7754cc2dbc0b5f9b2b02aa99eae65a6c1418ce3b3da2776831f89115d WatchSource:0}: Error finding container 779fd2b7754cc2dbc0b5f9b2b02aa99eae65a6c1418ce3b3da2776831f89115d: Status 404 returned error can't find the container with id 779fd2b7754cc2dbc0b5f9b2b02aa99eae65a6c1418ce3b3da2776831f89115d Dec 13 07:20:15 crc kubenswrapper[4644]: I1213 07:20:15.110698 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" event={"ID":"61d39411-4b48-4ea1-b9f9-aa161d05ca46","Type":"ContainerStarted","Data":"779fd2b7754cc2dbc0b5f9b2b02aa99eae65a6c1418ce3b3da2776831f89115d"} Dec 13 07:20:16 crc kubenswrapper[4644]: I1213 07:20:16.125400 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" event={"ID":"61d39411-4b48-4ea1-b9f9-aa161d05ca46","Type":"ContainerStarted","Data":"a085162f33ec3b7caf939b2cd2c3fca0fba96b4de86efe7fddf93d2b2aa3f35a"} Dec 13 07:20:16 crc kubenswrapper[4644]: I1213 07:20:16.144157 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" podStartSLOduration=1.622517045 podStartE2EDuration="2.144129687s" podCreationTimestamp="2025-12-13 07:20:14 +0000 UTC" firstStartedPulling="2025-12-13 07:20:14.963728725 +0000 UTC m=+2077.178679558" lastFinishedPulling="2025-12-13 07:20:15.485341367 +0000 UTC m=+2077.700292200" observedRunningTime="2025-12-13 07:20:16.143153421 +0000 UTC m=+2078.358104254" watchObservedRunningTime="2025-12-13 07:20:16.144129687 +0000 UTC m=+2078.359080521" Dec 13 07:20:46 crc kubenswrapper[4644]: E1213 07:20:46.486985 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61d39411_4b48_4ea1_b9f9_aa161d05ca46.slice/crio-conmon-a085162f33ec3b7caf939b2cd2c3fca0fba96b4de86efe7fddf93d2b2aa3f35a.scope\": RecentStats: unable to find data in memory cache]" Dec 13 07:20:47 crc kubenswrapper[4644]: I1213 07:20:47.390074 4644 generic.go:334] "Generic (PLEG): container finished" podID="61d39411-4b48-4ea1-b9f9-aa161d05ca46" containerID="a085162f33ec3b7caf939b2cd2c3fca0fba96b4de86efe7fddf93d2b2aa3f35a" exitCode=0 Dec 13 07:20:47 crc kubenswrapper[4644]: I1213 07:20:47.390123 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" 
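Both warnings in this stretch look like create/teardown races rather than real faults: cadvisor reports a conmon cgroup it has no cached stats for yet, and the watch handler looks up container 779fd2b7754c... before CRI-O has registered it, getting a 404. Consumers of such lookups typically treat not-found as transient and retry. A small sketch of that pattern; the lookup function is invented for illustration and is not a real cadvisor or CRI-O API:

package main

import (
	"errors"
	"fmt"
	"time"
)

var errNotFound = errors.New("status 404: can't find the container") // stand-in for the runtime's 404

// lookup is a placeholder for "resolve a cgroup watch event to a known
// container". It fails once to mimic the event racing container creation.
func lookup(id string, attempt int) error {
	if attempt == 0 {
		return errNotFound
	}
	return nil
}

func main() {
	id := "779fd2b7754cc2dbc0b5f9b2b02aa99eae65a6c1418ce3b3da2776831f89115d"
	for attempt := 0; ; attempt++ {
		err := lookup(id, attempt)
		if err == nil {
			fmt.Printf("resolved %.12s after %d attempt(s)\n", id, attempt+1)
			return
		}
		if !errors.Is(err, errNotFound) {
			panic(err) // only not-found is treated as a benign race
		}
		time.Sleep(100 * time.Millisecond) // back off and let the runtime catch up
	}
}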
event={"ID":"61d39411-4b48-4ea1-b9f9-aa161d05ca46","Type":"ContainerDied","Data":"a085162f33ec3b7caf939b2cd2c3fca0fba96b4de86efe7fddf93d2b2aa3f35a"} Dec 13 07:20:47 crc kubenswrapper[4644]: I1213 07:20:47.806514 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dphsh"] Dec 13 07:20:47 crc kubenswrapper[4644]: I1213 07:20:47.808310 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dphsh" Dec 13 07:20:47 crc kubenswrapper[4644]: I1213 07:20:47.826687 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dphsh"] Dec 13 07:20:47 crc kubenswrapper[4644]: I1213 07:20:47.954073 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77zrk\" (UniqueName: \"kubernetes.io/projected/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-kube-api-access-77zrk\") pod \"redhat-marketplace-dphsh\" (UID: \"b8e25c17-c7ad-432c-8f80-b48bdd02b51e\") " pod="openshift-marketplace/redhat-marketplace-dphsh" Dec 13 07:20:47 crc kubenswrapper[4644]: I1213 07:20:47.954462 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-utilities\") pod \"redhat-marketplace-dphsh\" (UID: \"b8e25c17-c7ad-432c-8f80-b48bdd02b51e\") " pod="openshift-marketplace/redhat-marketplace-dphsh" Dec 13 07:20:47 crc kubenswrapper[4644]: I1213 07:20:47.954528 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-catalog-content\") pod \"redhat-marketplace-dphsh\" (UID: \"b8e25c17-c7ad-432c-8f80-b48bdd02b51e\") " pod="openshift-marketplace/redhat-marketplace-dphsh" Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.056278 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77zrk\" (UniqueName: \"kubernetes.io/projected/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-kube-api-access-77zrk\") pod \"redhat-marketplace-dphsh\" (UID: \"b8e25c17-c7ad-432c-8f80-b48bdd02b51e\") " pod="openshift-marketplace/redhat-marketplace-dphsh" Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.056435 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-utilities\") pod \"redhat-marketplace-dphsh\" (UID: \"b8e25c17-c7ad-432c-8f80-b48bdd02b51e\") " pod="openshift-marketplace/redhat-marketplace-dphsh" Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.056530 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-catalog-content\") pod \"redhat-marketplace-dphsh\" (UID: \"b8e25c17-c7ad-432c-8f80-b48bdd02b51e\") " pod="openshift-marketplace/redhat-marketplace-dphsh" Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.056984 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-utilities\") pod \"redhat-marketplace-dphsh\" (UID: \"b8e25c17-c7ad-432c-8f80-b48bdd02b51e\") " pod="openshift-marketplace/redhat-marketplace-dphsh" Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.057035 4644 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-catalog-content\") pod \"redhat-marketplace-dphsh\" (UID: \"b8e25c17-c7ad-432c-8f80-b48bdd02b51e\") " pod="openshift-marketplace/redhat-marketplace-dphsh" Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.075341 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77zrk\" (UniqueName: \"kubernetes.io/projected/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-kube-api-access-77zrk\") pod \"redhat-marketplace-dphsh\" (UID: \"b8e25c17-c7ad-432c-8f80-b48bdd02b51e\") " pod="openshift-marketplace/redhat-marketplace-dphsh" Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.123648 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dphsh" Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.521815 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dphsh"] Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.695663 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.873203 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-ceph\") pod \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\" (UID: \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\") " Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.873951 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-ssh-key\") pod \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\" (UID: \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\") " Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.874128 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-inventory\") pod \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\" (UID: \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\") " Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.874198 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzvq7\" (UniqueName: \"kubernetes.io/projected/61d39411-4b48-4ea1-b9f9-aa161d05ca46-kube-api-access-zzvq7\") pod \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\" (UID: \"61d39411-4b48-4ea1-b9f9-aa161d05ca46\") " Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.880960 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-ceph" (OuterVolumeSpecName: "ceph") pod "61d39411-4b48-4ea1-b9f9-aa161d05ca46" (UID: "61d39411-4b48-4ea1-b9f9-aa161d05ca46"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.881174 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61d39411-4b48-4ea1-b9f9-aa161d05ca46-kube-api-access-zzvq7" (OuterVolumeSpecName: "kube-api-access-zzvq7") pod "61d39411-4b48-4ea1-b9f9-aa161d05ca46" (UID: "61d39411-4b48-4ea1-b9f9-aa161d05ca46"). InnerVolumeSpecName "kube-api-access-zzvq7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.898277 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "61d39411-4b48-4ea1-b9f9-aa161d05ca46" (UID: "61d39411-4b48-4ea1-b9f9-aa161d05ca46"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.899792 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-inventory" (OuterVolumeSpecName: "inventory") pod "61d39411-4b48-4ea1-b9f9-aa161d05ca46" (UID: "61d39411-4b48-4ea1-b9f9-aa161d05ca46"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.978078 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.978111 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.978123 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzvq7\" (UniqueName: \"kubernetes.io/projected/61d39411-4b48-4ea1-b9f9-aa161d05ca46-kube-api-access-zzvq7\") on node \"crc\" DevicePath \"\"" Dec 13 07:20:48 crc kubenswrapper[4644]: I1213 07:20:48.978137 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/61d39411-4b48-4ea1-b9f9-aa161d05ca46-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.414095 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" event={"ID":"61d39411-4b48-4ea1-b9f9-aa161d05ca46","Type":"ContainerDied","Data":"779fd2b7754cc2dbc0b5f9b2b02aa99eae65a6c1418ce3b3da2776831f89115d"} Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.414674 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="779fd2b7754cc2dbc0b5f9b2b02aa99eae65a6c1418ce3b3da2776831f89115d" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.414133 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.417222 4644 generic.go:334] "Generic (PLEG): container finished" podID="b8e25c17-c7ad-432c-8f80-b48bdd02b51e" containerID="afcc8fd01c9e3e4ea3fedc7a8739f4f75702d3ed7e40655d712c88e0e3807c30" exitCode=0 Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.417286 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dphsh" event={"ID":"b8e25c17-c7ad-432c-8f80-b48bdd02b51e","Type":"ContainerDied","Data":"afcc8fd01c9e3e4ea3fedc7a8739f4f75702d3ed7e40655d712c88e0e3807c30"} Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.417324 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dphsh" event={"ID":"b8e25c17-c7ad-432c-8f80-b48bdd02b51e","Type":"ContainerStarted","Data":"c244a68ed295d77c84bc3e35a30579a9bcd1a3310e1d01da19f321c7f34a9457"} Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.478512 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-r94dw"] Dec 13 07:20:49 crc kubenswrapper[4644]: E1213 07:20:49.478905 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61d39411-4b48-4ea1-b9f9-aa161d05ca46" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.478926 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="61d39411-4b48-4ea1-b9f9-aa161d05ca46" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.479111 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="61d39411-4b48-4ea1-b9f9-aa161d05ca46" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.479692 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.483774 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.483914 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.483976 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.483779 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.484256 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.488781 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-r94dw"] Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.588809 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mxph\" (UniqueName: \"kubernetes.io/projected/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-kube-api-access-8mxph\") pod \"ssh-known-hosts-edpm-deployment-r94dw\" (UID: \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\") " pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.588859 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-ceph\") pod \"ssh-known-hosts-edpm-deployment-r94dw\" (UID: \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\") " pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.588961 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-r94dw\" (UID: \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\") " pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.589271 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-r94dw\" (UID: \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\") " pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.691114 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-r94dw\" (UID: \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\") " pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.691273 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-ssh-key-openstack-edpm-ipam\") pod 
\"ssh-known-hosts-edpm-deployment-r94dw\" (UID: \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\") " pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.691324 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mxph\" (UniqueName: \"kubernetes.io/projected/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-kube-api-access-8mxph\") pod \"ssh-known-hosts-edpm-deployment-r94dw\" (UID: \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\") " pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.691348 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-ceph\") pod \"ssh-known-hosts-edpm-deployment-r94dw\" (UID: \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\") " pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.696932 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-r94dw\" (UID: \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\") " pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.696949 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-r94dw\" (UID: \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\") " pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.697659 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-ceph\") pod \"ssh-known-hosts-edpm-deployment-r94dw\" (UID: \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\") " pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.710711 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mxph\" (UniqueName: \"kubernetes.io/projected/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-kube-api-access-8mxph\") pod \"ssh-known-hosts-edpm-deployment-r94dw\" (UID: \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\") " pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.797753 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.997240 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wj84q"] Dec 13 07:20:49 crc kubenswrapper[4644]: I1213 07:20:49.999433 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wj84q" Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.007291 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wj84q"] Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.098522 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-utilities\") pod \"community-operators-wj84q\" (UID: \"244f25c5-3fe1-4b4f-a0f3-5fe163f20035\") " pod="openshift-marketplace/community-operators-wj84q" Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.098669 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-catalog-content\") pod \"community-operators-wj84q\" (UID: \"244f25c5-3fe1-4b4f-a0f3-5fe163f20035\") " pod="openshift-marketplace/community-operators-wj84q" Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.098714 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nkts\" (UniqueName: \"kubernetes.io/projected/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-kube-api-access-4nkts\") pod \"community-operators-wj84q\" (UID: \"244f25c5-3fe1-4b4f-a0f3-5fe163f20035\") " pod="openshift-marketplace/community-operators-wj84q" Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.203064 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nkts\" (UniqueName: \"kubernetes.io/projected/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-kube-api-access-4nkts\") pod \"community-operators-wj84q\" (UID: \"244f25c5-3fe1-4b4f-a0f3-5fe163f20035\") " pod="openshift-marketplace/community-operators-wj84q" Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.203671 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-utilities\") pod \"community-operators-wj84q\" (UID: \"244f25c5-3fe1-4b4f-a0f3-5fe163f20035\") " pod="openshift-marketplace/community-operators-wj84q" Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.203810 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-catalog-content\") pod \"community-operators-wj84q\" (UID: \"244f25c5-3fe1-4b4f-a0f3-5fe163f20035\") " pod="openshift-marketplace/community-operators-wj84q" Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.204350 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-catalog-content\") pod \"community-operators-wj84q\" (UID: \"244f25c5-3fe1-4b4f-a0f3-5fe163f20035\") " pod="openshift-marketplace/community-operators-wj84q" Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.204603 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-utilities\") pod \"community-operators-wj84q\" (UID: \"244f25c5-3fe1-4b4f-a0f3-5fe163f20035\") " pod="openshift-marketplace/community-operators-wj84q" Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.223676 4644 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4nkts\" (UniqueName: \"kubernetes.io/projected/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-kube-api-access-4nkts\") pod \"community-operators-wj84q\" (UID: \"244f25c5-3fe1-4b4f-a0f3-5fe163f20035\") " pod="openshift-marketplace/community-operators-wj84q" Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.281846 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-r94dw"] Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.327777 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wj84q" Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.426667 4644 generic.go:334] "Generic (PLEG): container finished" podID="b8e25c17-c7ad-432c-8f80-b48bdd02b51e" containerID="15af4a3a425e5a5c83a2ed623582ef101ee9d25d178b9d026e97b1aed209d0b9" exitCode=0 Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.426727 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dphsh" event={"ID":"b8e25c17-c7ad-432c-8f80-b48bdd02b51e","Type":"ContainerDied","Data":"15af4a3a425e5a5c83a2ed623582ef101ee9d25d178b9d026e97b1aed209d0b9"} Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.430156 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" event={"ID":"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c","Type":"ContainerStarted","Data":"de7239da4abd8174182f77cc3c7777bc4c7d2e8d017ef9882a45d258cde41bf6"} Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.774804 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wj84q"] Dec 13 07:20:50 crc kubenswrapper[4644]: W1213 07:20:50.784923 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod244f25c5_3fe1_4b4f_a0f3_5fe163f20035.slice/crio-91667e23cd8df2bb35045ebf18f87d5d762af809b899b56e46976c45ff18d1c3 WatchSource:0}: Error finding container 91667e23cd8df2bb35045ebf18f87d5d762af809b899b56e46976c45ff18d1c3: Status 404 returned error can't find the container with id 91667e23cd8df2bb35045ebf18f87d5d762af809b899b56e46976c45ff18d1c3 Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.990324 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nj7sg"] Dec 13 07:20:50 crc kubenswrapper[4644]: I1213 07:20:50.992331 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nj7sg" Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.011051 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nj7sg"] Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.124231 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wr6w6\" (UniqueName: \"kubernetes.io/projected/c85999a2-248f-4c53-be17-bcee8f22b784-kube-api-access-wr6w6\") pod \"redhat-operators-nj7sg\" (UID: \"c85999a2-248f-4c53-be17-bcee8f22b784\") " pod="openshift-marketplace/redhat-operators-nj7sg" Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.124322 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c85999a2-248f-4c53-be17-bcee8f22b784-catalog-content\") pod \"redhat-operators-nj7sg\" (UID: \"c85999a2-248f-4c53-be17-bcee8f22b784\") " pod="openshift-marketplace/redhat-operators-nj7sg" Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.124407 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c85999a2-248f-4c53-be17-bcee8f22b784-utilities\") pod \"redhat-operators-nj7sg\" (UID: \"c85999a2-248f-4c53-be17-bcee8f22b784\") " pod="openshift-marketplace/redhat-operators-nj7sg" Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.226392 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wr6w6\" (UniqueName: \"kubernetes.io/projected/c85999a2-248f-4c53-be17-bcee8f22b784-kube-api-access-wr6w6\") pod \"redhat-operators-nj7sg\" (UID: \"c85999a2-248f-4c53-be17-bcee8f22b784\") " pod="openshift-marketplace/redhat-operators-nj7sg" Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.226522 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c85999a2-248f-4c53-be17-bcee8f22b784-catalog-content\") pod \"redhat-operators-nj7sg\" (UID: \"c85999a2-248f-4c53-be17-bcee8f22b784\") " pod="openshift-marketplace/redhat-operators-nj7sg" Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.226582 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c85999a2-248f-4c53-be17-bcee8f22b784-utilities\") pod \"redhat-operators-nj7sg\" (UID: \"c85999a2-248f-4c53-be17-bcee8f22b784\") " pod="openshift-marketplace/redhat-operators-nj7sg" Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.227079 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c85999a2-248f-4c53-be17-bcee8f22b784-utilities\") pod \"redhat-operators-nj7sg\" (UID: \"c85999a2-248f-4c53-be17-bcee8f22b784\") " pod="openshift-marketplace/redhat-operators-nj7sg" Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.227200 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c85999a2-248f-4c53-be17-bcee8f22b784-catalog-content\") pod \"redhat-operators-nj7sg\" (UID: \"c85999a2-248f-4c53-be17-bcee8f22b784\") " pod="openshift-marketplace/redhat-operators-nj7sg" Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.244659 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-wr6w6\" (UniqueName: \"kubernetes.io/projected/c85999a2-248f-4c53-be17-bcee8f22b784-kube-api-access-wr6w6\") pod \"redhat-operators-nj7sg\" (UID: \"c85999a2-248f-4c53-be17-bcee8f22b784\") " pod="openshift-marketplace/redhat-operators-nj7sg" Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.309182 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nj7sg" Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.442840 4644 generic.go:334] "Generic (PLEG): container finished" podID="244f25c5-3fe1-4b4f-a0f3-5fe163f20035" containerID="fe0f98fe6e03cf0b2086634d656be6b5d4ac19557d0cd0cfec0dd8ede6f27d1e" exitCode=0 Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.442996 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wj84q" event={"ID":"244f25c5-3fe1-4b4f-a0f3-5fe163f20035","Type":"ContainerDied","Data":"fe0f98fe6e03cf0b2086634d656be6b5d4ac19557d0cd0cfec0dd8ede6f27d1e"} Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.443159 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wj84q" event={"ID":"244f25c5-3fe1-4b4f-a0f3-5fe163f20035","Type":"ContainerStarted","Data":"91667e23cd8df2bb35045ebf18f87d5d762af809b899b56e46976c45ff18d1c3"} Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.447906 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dphsh" event={"ID":"b8e25c17-c7ad-432c-8f80-b48bdd02b51e","Type":"ContainerStarted","Data":"dc5722ada745cd329819ea8b377058252ef560a6cf079e13aa533c93dae54aa1"} Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.453760 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" event={"ID":"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c","Type":"ContainerStarted","Data":"ab69bb2dd79377a7dee5631f29d567117a1775efadcb2a55e2ce661210ada73b"} Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.488127 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dphsh" podStartSLOduration=2.942831869 podStartE2EDuration="4.488107344s" podCreationTimestamp="2025-12-13 07:20:47 +0000 UTC" firstStartedPulling="2025-12-13 07:20:49.421883035 +0000 UTC m=+2111.636833868" lastFinishedPulling="2025-12-13 07:20:50.96715851 +0000 UTC m=+2113.182109343" observedRunningTime="2025-12-13 07:20:51.481813089 +0000 UTC m=+2113.696763922" watchObservedRunningTime="2025-12-13 07:20:51.488107344 +0000 UTC m=+2113.703058177" Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.504901 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" podStartSLOduration=1.649702859 podStartE2EDuration="2.504879891s" podCreationTimestamp="2025-12-13 07:20:49 +0000 UTC" firstStartedPulling="2025-12-13 07:20:50.290387979 +0000 UTC m=+2112.505338812" lastFinishedPulling="2025-12-13 07:20:51.145565012 +0000 UTC m=+2113.360515844" observedRunningTime="2025-12-13 07:20:51.497634898 +0000 UTC m=+2113.712585731" watchObservedRunningTime="2025-12-13 07:20:51.504879891 +0000 UTC m=+2113.719830724" Dec 13 07:20:51 crc kubenswrapper[4644]: I1213 07:20:51.748809 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nj7sg"] Dec 13 07:20:51 crc kubenswrapper[4644]: W1213 07:20:51.759334 4644 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc85999a2_248f_4c53_be17_bcee8f22b784.slice/crio-896e740813aba4def6ab8085d36074c32b6457e78b788e08cdef173873a2b147 WatchSource:0}: Error finding container 896e740813aba4def6ab8085d36074c32b6457e78b788e08cdef173873a2b147: Status 404 returned error can't find the container with id 896e740813aba4def6ab8085d36074c32b6457e78b788e08cdef173873a2b147 Dec 13 07:20:52 crc kubenswrapper[4644]: I1213 07:20:52.464637 4644 generic.go:334] "Generic (PLEG): container finished" podID="c85999a2-248f-4c53-be17-bcee8f22b784" containerID="dc31c81ded6f329863a5a4ec6159fdd3886dfee1c7216b956b54ca13dc7b025a" exitCode=0 Dec 13 07:20:52 crc kubenswrapper[4644]: I1213 07:20:52.464786 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nj7sg" event={"ID":"c85999a2-248f-4c53-be17-bcee8f22b784","Type":"ContainerDied","Data":"dc31c81ded6f329863a5a4ec6159fdd3886dfee1c7216b956b54ca13dc7b025a"} Dec 13 07:20:52 crc kubenswrapper[4644]: I1213 07:20:52.465069 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nj7sg" event={"ID":"c85999a2-248f-4c53-be17-bcee8f22b784","Type":"ContainerStarted","Data":"896e740813aba4def6ab8085d36074c32b6457e78b788e08cdef173873a2b147"} Dec 13 07:20:52 crc kubenswrapper[4644]: I1213 07:20:52.469561 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wj84q" event={"ID":"244f25c5-3fe1-4b4f-a0f3-5fe163f20035","Type":"ContainerStarted","Data":"958938e083a0647b4315114eeafdb15242ccc0c1b0386e93d794e99ca6bb62fe"} Dec 13 07:20:53 crc kubenswrapper[4644]: I1213 07:20:53.481302 4644 generic.go:334] "Generic (PLEG): container finished" podID="244f25c5-3fe1-4b4f-a0f3-5fe163f20035" containerID="958938e083a0647b4315114eeafdb15242ccc0c1b0386e93d794e99ca6bb62fe" exitCode=0 Dec 13 07:20:53 crc kubenswrapper[4644]: I1213 07:20:53.481388 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wj84q" event={"ID":"244f25c5-3fe1-4b4f-a0f3-5fe163f20035","Type":"ContainerDied","Data":"958938e083a0647b4315114eeafdb15242ccc0c1b0386e93d794e99ca6bb62fe"} Dec 13 07:20:53 crc kubenswrapper[4644]: I1213 07:20:53.492828 4644 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 07:20:54 crc kubenswrapper[4644]: I1213 07:20:54.494363 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nj7sg" event={"ID":"c85999a2-248f-4c53-be17-bcee8f22b784","Type":"ContainerStarted","Data":"ce7679e494b20c99ab78927524899e72abc3972a5fb82fcdce03015f45646b8f"} Dec 13 07:20:54 crc kubenswrapper[4644]: I1213 07:20:54.497964 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wj84q" event={"ID":"244f25c5-3fe1-4b4f-a0f3-5fe163f20035","Type":"ContainerStarted","Data":"33662f236312ebe17b35fcb6bdb26be8b937358898c2a5b589d9b83720bd87e9"} Dec 13 07:20:55 crc kubenswrapper[4644]: I1213 07:20:55.506992 4644 generic.go:334] "Generic (PLEG): container finished" podID="c85999a2-248f-4c53-be17-bcee8f22b784" containerID="ce7679e494b20c99ab78927524899e72abc3972a5fb82fcdce03015f45646b8f" exitCode=0 Dec 13 07:20:55 crc kubenswrapper[4644]: I1213 07:20:55.508569 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nj7sg" 
event={"ID":"c85999a2-248f-4c53-be17-bcee8f22b784","Type":"ContainerDied","Data":"ce7679e494b20c99ab78927524899e72abc3972a5fb82fcdce03015f45646b8f"} Dec 13 07:20:55 crc kubenswrapper[4644]: I1213 07:20:55.524390 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wj84q" podStartSLOduration=3.889926011 podStartE2EDuration="6.524372961s" podCreationTimestamp="2025-12-13 07:20:49 +0000 UTC" firstStartedPulling="2025-12-13 07:20:51.445653048 +0000 UTC m=+2113.660603881" lastFinishedPulling="2025-12-13 07:20:54.080100009 +0000 UTC m=+2116.295050831" observedRunningTime="2025-12-13 07:20:54.532026251 +0000 UTC m=+2116.746977085" watchObservedRunningTime="2025-12-13 07:20:55.524372961 +0000 UTC m=+2117.739323794" Dec 13 07:20:56 crc kubenswrapper[4644]: I1213 07:20:56.520078 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nj7sg" event={"ID":"c85999a2-248f-4c53-be17-bcee8f22b784","Type":"ContainerStarted","Data":"75e36c6ec34e0ba6fdccf517ff384221c19ec3653415a61bac7743ad47030784"} Dec 13 07:20:56 crc kubenswrapper[4644]: I1213 07:20:56.544535 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nj7sg" podStartSLOduration=2.864609028 podStartE2EDuration="6.544514593s" podCreationTimestamp="2025-12-13 07:20:50 +0000 UTC" firstStartedPulling="2025-12-13 07:20:52.466854502 +0000 UTC m=+2114.681805335" lastFinishedPulling="2025-12-13 07:20:56.146760067 +0000 UTC m=+2118.361710900" observedRunningTime="2025-12-13 07:20:56.539908212 +0000 UTC m=+2118.754859055" watchObservedRunningTime="2025-12-13 07:20:56.544514593 +0000 UTC m=+2118.759465426" Dec 13 07:20:58 crc kubenswrapper[4644]: I1213 07:20:58.123859 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dphsh" Dec 13 07:20:58 crc kubenswrapper[4644]: I1213 07:20:58.124175 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dphsh" Dec 13 07:20:58 crc kubenswrapper[4644]: I1213 07:20:58.158329 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dphsh" Dec 13 07:20:58 crc kubenswrapper[4644]: I1213 07:20:58.539536 4644 generic.go:334] "Generic (PLEG): container finished" podID="cc7fbb6b-2295-4c73-a14f-d5888e6fc75c" containerID="ab69bb2dd79377a7dee5631f29d567117a1775efadcb2a55e2ce661210ada73b" exitCode=0 Dec 13 07:20:58 crc kubenswrapper[4644]: I1213 07:20:58.539641 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" event={"ID":"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c","Type":"ContainerDied","Data":"ab69bb2dd79377a7dee5631f29d567117a1775efadcb2a55e2ce661210ada73b"} Dec 13 07:20:58 crc kubenswrapper[4644]: I1213 07:20:58.580186 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dphsh" Dec 13 07:20:59 crc kubenswrapper[4644]: I1213 07:20:59.887951 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" Dec 13 07:20:59 crc kubenswrapper[4644]: I1213 07:20:59.938064 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-ssh-key-openstack-edpm-ipam\") pod \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\" (UID: \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\") " Dec 13 07:20:59 crc kubenswrapper[4644]: I1213 07:20:59.938117 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-inventory-0\") pod \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\" (UID: \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\") " Dec 13 07:20:59 crc kubenswrapper[4644]: I1213 07:20:59.938184 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-ceph\") pod \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\" (UID: \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\") " Dec 13 07:20:59 crc kubenswrapper[4644]: I1213 07:20:59.938299 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mxph\" (UniqueName: \"kubernetes.io/projected/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-kube-api-access-8mxph\") pod \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\" (UID: \"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c\") " Dec 13 07:20:59 crc kubenswrapper[4644]: I1213 07:20:59.945428 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-ceph" (OuterVolumeSpecName: "ceph") pod "cc7fbb6b-2295-4c73-a14f-d5888e6fc75c" (UID: "cc7fbb6b-2295-4c73-a14f-d5888e6fc75c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:20:59 crc kubenswrapper[4644]: I1213 07:20:59.945644 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-kube-api-access-8mxph" (OuterVolumeSpecName: "kube-api-access-8mxph") pod "cc7fbb6b-2295-4c73-a14f-d5888e6fc75c" (UID: "cc7fbb6b-2295-4c73-a14f-d5888e6fc75c"). InnerVolumeSpecName "kube-api-access-8mxph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:20:59 crc kubenswrapper[4644]: I1213 07:20:59.962410 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "cc7fbb6b-2295-4c73-a14f-d5888e6fc75c" (UID: "cc7fbb6b-2295-4c73-a14f-d5888e6fc75c"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:20:59 crc kubenswrapper[4644]: I1213 07:20:59.967914 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "cc7fbb6b-2295-4c73-a14f-d5888e6fc75c" (UID: "cc7fbb6b-2295-4c73-a14f-d5888e6fc75c"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.041308 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.041348 4644 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.041359 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.041369 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mxph\" (UniqueName: \"kubernetes.io/projected/cc7fbb6b-2295-4c73-a14f-d5888e6fc75c-kube-api-access-8mxph\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.328612 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wj84q" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.328692 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wj84q" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.379431 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wj84q" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.559635 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.559737 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-r94dw" event={"ID":"cc7fbb6b-2295-4c73-a14f-d5888e6fc75c","Type":"ContainerDied","Data":"de7239da4abd8174182f77cc3c7777bc4c7d2e8d017ef9882a45d258cde41bf6"} Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.560007 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de7239da4abd8174182f77cc3c7777bc4c7d2e8d017ef9882a45d258cde41bf6" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.600085 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wj84q" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.612222 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l"] Dec 13 07:21:00 crc kubenswrapper[4644]: E1213 07:21:00.612657 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc7fbb6b-2295-4c73-a14f-d5888e6fc75c" containerName="ssh-known-hosts-edpm-deployment" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.612678 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc7fbb6b-2295-4c73-a14f-d5888e6fc75c" containerName="ssh-known-hosts-edpm-deployment" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.612873 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc7fbb6b-2295-4c73-a14f-d5888e6fc75c" containerName="ssh-known-hosts-edpm-deployment" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.613467 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.615349 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.616080 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.616384 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.616550 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.620519 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.621647 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l"] Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.652494 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-z2w8l\" (UID: \"802870f9-d6d6-493e-a2b0-69d7067dadbe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.652626 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-z2w8l\" (UID: \"802870f9-d6d6-493e-a2b0-69d7067dadbe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.652889 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktsbv\" (UniqueName: \"kubernetes.io/projected/802870f9-d6d6-493e-a2b0-69d7067dadbe-kube-api-access-ktsbv\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-z2w8l\" (UID: \"802870f9-d6d6-493e-a2b0-69d7067dadbe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.653235 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-z2w8l\" (UID: \"802870f9-d6d6-493e-a2b0-69d7067dadbe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.755486 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-z2w8l\" (UID: \"802870f9-d6d6-493e-a2b0-69d7067dadbe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.755821 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-z2w8l\" (UID: \"802870f9-d6d6-493e-a2b0-69d7067dadbe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.755959 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-z2w8l\" (UID: \"802870f9-d6d6-493e-a2b0-69d7067dadbe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.756167 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktsbv\" (UniqueName: \"kubernetes.io/projected/802870f9-d6d6-493e-a2b0-69d7067dadbe-kube-api-access-ktsbv\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-z2w8l\" (UID: \"802870f9-d6d6-493e-a2b0-69d7067dadbe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.761177 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-z2w8l\" (UID: \"802870f9-d6d6-493e-a2b0-69d7067dadbe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.761533 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-z2w8l\" (UID: \"802870f9-d6d6-493e-a2b0-69d7067dadbe\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.764672 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-z2w8l\" (UID: \"802870f9-d6d6-493e-a2b0-69d7067dadbe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.774017 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktsbv\" (UniqueName: \"kubernetes.io/projected/802870f9-d6d6-493e-a2b0-69d7067dadbe-kube-api-access-ktsbv\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-z2w8l\" (UID: \"802870f9-d6d6-493e-a2b0-69d7067dadbe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.937681 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.986202 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dphsh"] Dec 13 07:21:00 crc kubenswrapper[4644]: I1213 07:21:00.986549 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dphsh" podUID="b8e25c17-c7ad-432c-8f80-b48bdd02b51e" containerName="registry-server" containerID="cri-o://dc5722ada745cd329819ea8b377058252ef560a6cf079e13aa533c93dae54aa1" gracePeriod=2 Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.310725 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nj7sg" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.310989 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nj7sg" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.340837 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dphsh" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.351939 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nj7sg" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.368555 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77zrk\" (UniqueName: \"kubernetes.io/projected/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-kube-api-access-77zrk\") pod \"b8e25c17-c7ad-432c-8f80-b48bdd02b51e\" (UID: \"b8e25c17-c7ad-432c-8f80-b48bdd02b51e\") " Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.368641 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-utilities\") pod \"b8e25c17-c7ad-432c-8f80-b48bdd02b51e\" (UID: \"b8e25c17-c7ad-432c-8f80-b48bdd02b51e\") " Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.368678 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-catalog-content\") pod \"b8e25c17-c7ad-432c-8f80-b48bdd02b51e\" (UID: \"b8e25c17-c7ad-432c-8f80-b48bdd02b51e\") " Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.369417 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-utilities" (OuterVolumeSpecName: "utilities") pod "b8e25c17-c7ad-432c-8f80-b48bdd02b51e" (UID: "b8e25c17-c7ad-432c-8f80-b48bdd02b51e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.370179 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.378148 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-kube-api-access-77zrk" (OuterVolumeSpecName: "kube-api-access-77zrk") pod "b8e25c17-c7ad-432c-8f80-b48bdd02b51e" (UID: "b8e25c17-c7ad-432c-8f80-b48bdd02b51e"). InnerVolumeSpecName "kube-api-access-77zrk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.385662 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b8e25c17-c7ad-432c-8f80-b48bdd02b51e" (UID: "b8e25c17-c7ad-432c-8f80-b48bdd02b51e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.405937 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l"] Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.472602 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77zrk\" (UniqueName: \"kubernetes.io/projected/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-kube-api-access-77zrk\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.472727 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8e25c17-c7ad-432c-8f80-b48bdd02b51e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.572944 4644 generic.go:334] "Generic (PLEG): container finished" podID="b8e25c17-c7ad-432c-8f80-b48bdd02b51e" containerID="dc5722ada745cd329819ea8b377058252ef560a6cf079e13aa533c93dae54aa1" exitCode=0 Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.573098 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dphsh" event={"ID":"b8e25c17-c7ad-432c-8f80-b48bdd02b51e","Type":"ContainerDied","Data":"dc5722ada745cd329819ea8b377058252ef560a6cf079e13aa533c93dae54aa1"} Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.573128 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dphsh" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.573187 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dphsh" event={"ID":"b8e25c17-c7ad-432c-8f80-b48bdd02b51e","Type":"ContainerDied","Data":"c244a68ed295d77c84bc3e35a30579a9bcd1a3310e1d01da19f321c7f34a9457"} Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.573231 4644 scope.go:117] "RemoveContainer" containerID="dc5722ada745cd329819ea8b377058252ef560a6cf079e13aa533c93dae54aa1" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.576405 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" event={"ID":"802870f9-d6d6-493e-a2b0-69d7067dadbe","Type":"ContainerStarted","Data":"94fa0851dced1b597ddb686abb2c3289334b373b856bf8a43df2cb859fcee09f"} Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.597653 4644 scope.go:117] "RemoveContainer" containerID="15af4a3a425e5a5c83a2ed623582ef101ee9d25d178b9d026e97b1aed209d0b9" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.605491 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dphsh"] Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.613167 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dphsh"] Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.617569 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nj7sg" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.620253 4644 scope.go:117] "RemoveContainer" containerID="afcc8fd01c9e3e4ea3fedc7a8739f4f75702d3ed7e40655d712c88e0e3807c30" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.645253 4644 scope.go:117] "RemoveContainer" containerID="dc5722ada745cd329819ea8b377058252ef560a6cf079e13aa533c93dae54aa1" Dec 13 07:21:01 crc kubenswrapper[4644]: E1213 07:21:01.647161 4644 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc5722ada745cd329819ea8b377058252ef560a6cf079e13aa533c93dae54aa1\": container with ID starting with dc5722ada745cd329819ea8b377058252ef560a6cf079e13aa533c93dae54aa1 not found: ID does not exist" containerID="dc5722ada745cd329819ea8b377058252ef560a6cf079e13aa533c93dae54aa1" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.647209 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc5722ada745cd329819ea8b377058252ef560a6cf079e13aa533c93dae54aa1"} err="failed to get container status \"dc5722ada745cd329819ea8b377058252ef560a6cf079e13aa533c93dae54aa1\": rpc error: code = NotFound desc = could not find container \"dc5722ada745cd329819ea8b377058252ef560a6cf079e13aa533c93dae54aa1\": container with ID starting with dc5722ada745cd329819ea8b377058252ef560a6cf079e13aa533c93dae54aa1 not found: ID does not exist" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.647233 4644 scope.go:117] "RemoveContainer" containerID="15af4a3a425e5a5c83a2ed623582ef101ee9d25d178b9d026e97b1aed209d0b9" Dec 13 07:21:01 crc kubenswrapper[4644]: E1213 07:21:01.647540 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15af4a3a425e5a5c83a2ed623582ef101ee9d25d178b9d026e97b1aed209d0b9\": container with ID starting with 15af4a3a425e5a5c83a2ed623582ef101ee9d25d178b9d026e97b1aed209d0b9 not found: ID does not exist" containerID="15af4a3a425e5a5c83a2ed623582ef101ee9d25d178b9d026e97b1aed209d0b9" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.647568 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15af4a3a425e5a5c83a2ed623582ef101ee9d25d178b9d026e97b1aed209d0b9"} err="failed to get container status \"15af4a3a425e5a5c83a2ed623582ef101ee9d25d178b9d026e97b1aed209d0b9\": rpc error: code = NotFound desc = could not find container \"15af4a3a425e5a5c83a2ed623582ef101ee9d25d178b9d026e97b1aed209d0b9\": container with ID starting with 15af4a3a425e5a5c83a2ed623582ef101ee9d25d178b9d026e97b1aed209d0b9 not found: ID does not exist" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.647581 4644 scope.go:117] "RemoveContainer" containerID="afcc8fd01c9e3e4ea3fedc7a8739f4f75702d3ed7e40655d712c88e0e3807c30" Dec 13 07:21:01 crc kubenswrapper[4644]: E1213 07:21:01.647893 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afcc8fd01c9e3e4ea3fedc7a8739f4f75702d3ed7e40655d712c88e0e3807c30\": container with ID starting with afcc8fd01c9e3e4ea3fedc7a8739f4f75702d3ed7e40655d712c88e0e3807c30 not found: ID does not exist" containerID="afcc8fd01c9e3e4ea3fedc7a8739f4f75702d3ed7e40655d712c88e0e3807c30" Dec 13 07:21:01 crc kubenswrapper[4644]: I1213 07:21:01.647912 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afcc8fd01c9e3e4ea3fedc7a8739f4f75702d3ed7e40655d712c88e0e3807c30"} err="failed to get container status \"afcc8fd01c9e3e4ea3fedc7a8739f4f75702d3ed7e40655d712c88e0e3807c30\": rpc error: code = NotFound desc = could not find container \"afcc8fd01c9e3e4ea3fedc7a8739f4f75702d3ed7e40655d712c88e0e3807c30\": container with ID starting with afcc8fd01c9e3e4ea3fedc7a8739f4f75702d3ed7e40655d712c88e0e3807c30 not found: ID does not exist" Dec 13 07:21:02 crc kubenswrapper[4644]: I1213 07:21:02.398380 4644 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="b8e25c17-c7ad-432c-8f80-b48bdd02b51e" path="/var/lib/kubelet/pods/b8e25c17-c7ad-432c-8f80-b48bdd02b51e/volumes" Dec 13 07:21:02 crc kubenswrapper[4644]: I1213 07:21:02.583952 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" event={"ID":"802870f9-d6d6-493e-a2b0-69d7067dadbe","Type":"ContainerStarted","Data":"85f874b64b525f98324a9d05d8964f40c3f9f75a0b01857cafb97ab2d8800111"} Dec 13 07:21:02 crc kubenswrapper[4644]: I1213 07:21:02.610078 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" podStartSLOduration=1.9586537339999999 podStartE2EDuration="2.61005806s" podCreationTimestamp="2025-12-13 07:21:00 +0000 UTC" firstStartedPulling="2025-12-13 07:21:01.409493715 +0000 UTC m=+2123.624444548" lastFinishedPulling="2025-12-13 07:21:02.060898041 +0000 UTC m=+2124.275848874" observedRunningTime="2025-12-13 07:21:02.605222358 +0000 UTC m=+2124.820173191" watchObservedRunningTime="2025-12-13 07:21:02.61005806 +0000 UTC m=+2124.825008893" Dec 13 07:21:02 crc kubenswrapper[4644]: I1213 07:21:02.779316 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wj84q"] Dec 13 07:21:03 crc kubenswrapper[4644]: I1213 07:21:03.594507 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wj84q" podUID="244f25c5-3fe1-4b4f-a0f3-5fe163f20035" containerName="registry-server" containerID="cri-o://33662f236312ebe17b35fcb6bdb26be8b937358898c2a5b589d9b83720bd87e9" gracePeriod=2 Dec 13 07:21:03 crc kubenswrapper[4644]: I1213 07:21:03.957999 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wj84q" Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.040681 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-catalog-content\") pod \"244f25c5-3fe1-4b4f-a0f3-5fe163f20035\" (UID: \"244f25c5-3fe1-4b4f-a0f3-5fe163f20035\") " Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.041173 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4nkts\" (UniqueName: \"kubernetes.io/projected/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-kube-api-access-4nkts\") pod \"244f25c5-3fe1-4b4f-a0f3-5fe163f20035\" (UID: \"244f25c5-3fe1-4b4f-a0f3-5fe163f20035\") " Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.041345 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-utilities\") pod \"244f25c5-3fe1-4b4f-a0f3-5fe163f20035\" (UID: \"244f25c5-3fe1-4b4f-a0f3-5fe163f20035\") " Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.042361 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-utilities" (OuterVolumeSpecName: "utilities") pod "244f25c5-3fe1-4b4f-a0f3-5fe163f20035" (UID: "244f25c5-3fe1-4b4f-a0f3-5fe163f20035"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.049510 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-kube-api-access-4nkts" (OuterVolumeSpecName: "kube-api-access-4nkts") pod "244f25c5-3fe1-4b4f-a0f3-5fe163f20035" (UID: "244f25c5-3fe1-4b4f-a0f3-5fe163f20035"). InnerVolumeSpecName "kube-api-access-4nkts". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.082213 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "244f25c5-3fe1-4b4f-a0f3-5fe163f20035" (UID: "244f25c5-3fe1-4b4f-a0f3-5fe163f20035"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.145422 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4nkts\" (UniqueName: \"kubernetes.io/projected/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-kube-api-access-4nkts\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.145467 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.145480 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/244f25c5-3fe1-4b4f-a0f3-5fe163f20035-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.603022 4644 generic.go:334] "Generic (PLEG): container finished" podID="244f25c5-3fe1-4b4f-a0f3-5fe163f20035" containerID="33662f236312ebe17b35fcb6bdb26be8b937358898c2a5b589d9b83720bd87e9" exitCode=0 Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.603068 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wj84q" event={"ID":"244f25c5-3fe1-4b4f-a0f3-5fe163f20035","Type":"ContainerDied","Data":"33662f236312ebe17b35fcb6bdb26be8b937358898c2a5b589d9b83720bd87e9"} Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.603097 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wj84q" event={"ID":"244f25c5-3fe1-4b4f-a0f3-5fe163f20035","Type":"ContainerDied","Data":"91667e23cd8df2bb35045ebf18f87d5d762af809b899b56e46976c45ff18d1c3"} Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.603107 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wj84q" Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.603114 4644 scope.go:117] "RemoveContainer" containerID="33662f236312ebe17b35fcb6bdb26be8b937358898c2a5b589d9b83720bd87e9" Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.625994 4644 scope.go:117] "RemoveContainer" containerID="958938e083a0647b4315114eeafdb15242ccc0c1b0386e93d794e99ca6bb62fe" Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.628267 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wj84q"] Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.634005 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wj84q"] Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.665686 4644 scope.go:117] "RemoveContainer" containerID="fe0f98fe6e03cf0b2086634d656be6b5d4ac19557d0cd0cfec0dd8ede6f27d1e" Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.686738 4644 scope.go:117] "RemoveContainer" containerID="33662f236312ebe17b35fcb6bdb26be8b937358898c2a5b589d9b83720bd87e9" Dec 13 07:21:04 crc kubenswrapper[4644]: E1213 07:21:04.687252 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33662f236312ebe17b35fcb6bdb26be8b937358898c2a5b589d9b83720bd87e9\": container with ID starting with 33662f236312ebe17b35fcb6bdb26be8b937358898c2a5b589d9b83720bd87e9 not found: ID does not exist" containerID="33662f236312ebe17b35fcb6bdb26be8b937358898c2a5b589d9b83720bd87e9" Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.687395 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33662f236312ebe17b35fcb6bdb26be8b937358898c2a5b589d9b83720bd87e9"} err="failed to get container status \"33662f236312ebe17b35fcb6bdb26be8b937358898c2a5b589d9b83720bd87e9\": rpc error: code = NotFound desc = could not find container \"33662f236312ebe17b35fcb6bdb26be8b937358898c2a5b589d9b83720bd87e9\": container with ID starting with 33662f236312ebe17b35fcb6bdb26be8b937358898c2a5b589d9b83720bd87e9 not found: ID does not exist" Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.687538 4644 scope.go:117] "RemoveContainer" containerID="958938e083a0647b4315114eeafdb15242ccc0c1b0386e93d794e99ca6bb62fe" Dec 13 07:21:04 crc kubenswrapper[4644]: E1213 07:21:04.688039 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"958938e083a0647b4315114eeafdb15242ccc0c1b0386e93d794e99ca6bb62fe\": container with ID starting with 958938e083a0647b4315114eeafdb15242ccc0c1b0386e93d794e99ca6bb62fe not found: ID does not exist" containerID="958938e083a0647b4315114eeafdb15242ccc0c1b0386e93d794e99ca6bb62fe" Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.688138 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"958938e083a0647b4315114eeafdb15242ccc0c1b0386e93d794e99ca6bb62fe"} err="failed to get container status \"958938e083a0647b4315114eeafdb15242ccc0c1b0386e93d794e99ca6bb62fe\": rpc error: code = NotFound desc = could not find container \"958938e083a0647b4315114eeafdb15242ccc0c1b0386e93d794e99ca6bb62fe\": container with ID starting with 958938e083a0647b4315114eeafdb15242ccc0c1b0386e93d794e99ca6bb62fe not found: ID does not exist" Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.688236 4644 scope.go:117] "RemoveContainer" 
containerID="fe0f98fe6e03cf0b2086634d656be6b5d4ac19557d0cd0cfec0dd8ede6f27d1e" Dec 13 07:21:04 crc kubenswrapper[4644]: E1213 07:21:04.688671 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe0f98fe6e03cf0b2086634d656be6b5d4ac19557d0cd0cfec0dd8ede6f27d1e\": container with ID starting with fe0f98fe6e03cf0b2086634d656be6b5d4ac19557d0cd0cfec0dd8ede6f27d1e not found: ID does not exist" containerID="fe0f98fe6e03cf0b2086634d656be6b5d4ac19557d0cd0cfec0dd8ede6f27d1e" Dec 13 07:21:04 crc kubenswrapper[4644]: I1213 07:21:04.688779 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe0f98fe6e03cf0b2086634d656be6b5d4ac19557d0cd0cfec0dd8ede6f27d1e"} err="failed to get container status \"fe0f98fe6e03cf0b2086634d656be6b5d4ac19557d0cd0cfec0dd8ede6f27d1e\": rpc error: code = NotFound desc = could not find container \"fe0f98fe6e03cf0b2086634d656be6b5d4ac19557d0cd0cfec0dd8ede6f27d1e\": container with ID starting with fe0f98fe6e03cf0b2086634d656be6b5d4ac19557d0cd0cfec0dd8ede6f27d1e not found: ID does not exist" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.180771 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nj7sg"] Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.181189 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nj7sg" podUID="c85999a2-248f-4c53-be17-bcee8f22b784" containerName="registry-server" containerID="cri-o://75e36c6ec34e0ba6fdccf517ff384221c19ec3653415a61bac7743ad47030784" gracePeriod=2 Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.557287 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nj7sg" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.577149 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c85999a2-248f-4c53-be17-bcee8f22b784-utilities\") pod \"c85999a2-248f-4c53-be17-bcee8f22b784\" (UID: \"c85999a2-248f-4c53-be17-bcee8f22b784\") " Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.577958 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c85999a2-248f-4c53-be17-bcee8f22b784-catalog-content\") pod \"c85999a2-248f-4c53-be17-bcee8f22b784\" (UID: \"c85999a2-248f-4c53-be17-bcee8f22b784\") " Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.578057 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wr6w6\" (UniqueName: \"kubernetes.io/projected/c85999a2-248f-4c53-be17-bcee8f22b784-kube-api-access-wr6w6\") pod \"c85999a2-248f-4c53-be17-bcee8f22b784\" (UID: \"c85999a2-248f-4c53-be17-bcee8f22b784\") " Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.589398 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c85999a2-248f-4c53-be17-bcee8f22b784-utilities" (OuterVolumeSpecName: "utilities") pod "c85999a2-248f-4c53-be17-bcee8f22b784" (UID: "c85999a2-248f-4c53-be17-bcee8f22b784"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.594918 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c85999a2-248f-4c53-be17-bcee8f22b784-kube-api-access-wr6w6" (OuterVolumeSpecName: "kube-api-access-wr6w6") pod "c85999a2-248f-4c53-be17-bcee8f22b784" (UID: "c85999a2-248f-4c53-be17-bcee8f22b784"). InnerVolumeSpecName "kube-api-access-wr6w6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.615956 4644 generic.go:334] "Generic (PLEG): container finished" podID="c85999a2-248f-4c53-be17-bcee8f22b784" containerID="75e36c6ec34e0ba6fdccf517ff384221c19ec3653415a61bac7743ad47030784" exitCode=0 Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.616075 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nj7sg" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.616141 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nj7sg" event={"ID":"c85999a2-248f-4c53-be17-bcee8f22b784","Type":"ContainerDied","Data":"75e36c6ec34e0ba6fdccf517ff384221c19ec3653415a61bac7743ad47030784"} Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.616396 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nj7sg" event={"ID":"c85999a2-248f-4c53-be17-bcee8f22b784","Type":"ContainerDied","Data":"896e740813aba4def6ab8085d36074c32b6457e78b788e08cdef173873a2b147"} Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.616422 4644 scope.go:117] "RemoveContainer" containerID="75e36c6ec34e0ba6fdccf517ff384221c19ec3653415a61bac7743ad47030784" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.634278 4644 scope.go:117] "RemoveContainer" containerID="ce7679e494b20c99ab78927524899e72abc3972a5fb82fcdce03015f45646b8f" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.658785 4644 scope.go:117] "RemoveContainer" containerID="dc31c81ded6f329863a5a4ec6159fdd3886dfee1c7216b956b54ca13dc7b025a" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.680189 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c85999a2-248f-4c53-be17-bcee8f22b784-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c85999a2-248f-4c53-be17-bcee8f22b784" (UID: "c85999a2-248f-4c53-be17-bcee8f22b784"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.680211 4644 scope.go:117] "RemoveContainer" containerID="75e36c6ec34e0ba6fdccf517ff384221c19ec3653415a61bac7743ad47030784" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.680480 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c85999a2-248f-4c53-be17-bcee8f22b784-catalog-content\") pod \"c85999a2-248f-4c53-be17-bcee8f22b784\" (UID: \"c85999a2-248f-4c53-be17-bcee8f22b784\") " Dec 13 07:21:05 crc kubenswrapper[4644]: W1213 07:21:05.680711 4644 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/c85999a2-248f-4c53-be17-bcee8f22b784/volumes/kubernetes.io~empty-dir/catalog-content Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.680743 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c85999a2-248f-4c53-be17-bcee8f22b784-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c85999a2-248f-4c53-be17-bcee8f22b784" (UID: "c85999a2-248f-4c53-be17-bcee8f22b784"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:21:05 crc kubenswrapper[4644]: E1213 07:21:05.680767 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75e36c6ec34e0ba6fdccf517ff384221c19ec3653415a61bac7743ad47030784\": container with ID starting with 75e36c6ec34e0ba6fdccf517ff384221c19ec3653415a61bac7743ad47030784 not found: ID does not exist" containerID="75e36c6ec34e0ba6fdccf517ff384221c19ec3653415a61bac7743ad47030784" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.680798 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75e36c6ec34e0ba6fdccf517ff384221c19ec3653415a61bac7743ad47030784"} err="failed to get container status \"75e36c6ec34e0ba6fdccf517ff384221c19ec3653415a61bac7743ad47030784\": rpc error: code = NotFound desc = could not find container \"75e36c6ec34e0ba6fdccf517ff384221c19ec3653415a61bac7743ad47030784\": container with ID starting with 75e36c6ec34e0ba6fdccf517ff384221c19ec3653415a61bac7743ad47030784 not found: ID does not exist" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.680823 4644 scope.go:117] "RemoveContainer" containerID="ce7679e494b20c99ab78927524899e72abc3972a5fb82fcdce03015f45646b8f" Dec 13 07:21:05 crc kubenswrapper[4644]: E1213 07:21:05.681205 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce7679e494b20c99ab78927524899e72abc3972a5fb82fcdce03015f45646b8f\": container with ID starting with ce7679e494b20c99ab78927524899e72abc3972a5fb82fcdce03015f45646b8f not found: ID does not exist" containerID="ce7679e494b20c99ab78927524899e72abc3972a5fb82fcdce03015f45646b8f" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.681294 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce7679e494b20c99ab78927524899e72abc3972a5fb82fcdce03015f45646b8f"} err="failed to get container status \"ce7679e494b20c99ab78927524899e72abc3972a5fb82fcdce03015f45646b8f\": rpc error: code = NotFound desc = could not find container \"ce7679e494b20c99ab78927524899e72abc3972a5fb82fcdce03015f45646b8f\": container with ID starting with ce7679e494b20c99ab78927524899e72abc3972a5fb82fcdce03015f45646b8f not 
found: ID does not exist" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.681370 4644 scope.go:117] "RemoveContainer" containerID="dc31c81ded6f329863a5a4ec6159fdd3886dfee1c7216b956b54ca13dc7b025a" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.681457 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wr6w6\" (UniqueName: \"kubernetes.io/projected/c85999a2-248f-4c53-be17-bcee8f22b784-kube-api-access-wr6w6\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.681475 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c85999a2-248f-4c53-be17-bcee8f22b784-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.681484 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c85999a2-248f-4c53-be17-bcee8f22b784-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:05 crc kubenswrapper[4644]: E1213 07:21:05.682007 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc31c81ded6f329863a5a4ec6159fdd3886dfee1c7216b956b54ca13dc7b025a\": container with ID starting with dc31c81ded6f329863a5a4ec6159fdd3886dfee1c7216b956b54ca13dc7b025a not found: ID does not exist" containerID="dc31c81ded6f329863a5a4ec6159fdd3886dfee1c7216b956b54ca13dc7b025a" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.682068 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc31c81ded6f329863a5a4ec6159fdd3886dfee1c7216b956b54ca13dc7b025a"} err="failed to get container status \"dc31c81ded6f329863a5a4ec6159fdd3886dfee1c7216b956b54ca13dc7b025a\": rpc error: code = NotFound desc = could not find container \"dc31c81ded6f329863a5a4ec6159fdd3886dfee1c7216b956b54ca13dc7b025a\": container with ID starting with dc31c81ded6f329863a5a4ec6159fdd3886dfee1c7216b956b54ca13dc7b025a not found: ID does not exist" Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.945005 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nj7sg"] Dec 13 07:21:05 crc kubenswrapper[4644]: I1213 07:21:05.951942 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nj7sg"] Dec 13 07:21:06 crc kubenswrapper[4644]: I1213 07:21:06.401718 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="244f25c5-3fe1-4b4f-a0f3-5fe163f20035" path="/var/lib/kubelet/pods/244f25c5-3fe1-4b4f-a0f3-5fe163f20035/volumes" Dec 13 07:21:06 crc kubenswrapper[4644]: I1213 07:21:06.402413 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c85999a2-248f-4c53-be17-bcee8f22b784" path="/var/lib/kubelet/pods/c85999a2-248f-4c53-be17-bcee8f22b784/volumes" Dec 13 07:21:08 crc kubenswrapper[4644]: I1213 07:21:08.645234 4644 generic.go:334] "Generic (PLEG): container finished" podID="802870f9-d6d6-493e-a2b0-69d7067dadbe" containerID="85f874b64b525f98324a9d05d8964f40c3f9f75a0b01857cafb97ab2d8800111" exitCode=0 Dec 13 07:21:08 crc kubenswrapper[4644]: I1213 07:21:08.645318 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" event={"ID":"802870f9-d6d6-493e-a2b0-69d7067dadbe","Type":"ContainerDied","Data":"85f874b64b525f98324a9d05d8964f40c3f9f75a0b01857cafb97ab2d8800111"} Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 
07:21:10.011954 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.082375 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktsbv\" (UniqueName: \"kubernetes.io/projected/802870f9-d6d6-493e-a2b0-69d7067dadbe-kube-api-access-ktsbv\") pod \"802870f9-d6d6-493e-a2b0-69d7067dadbe\" (UID: \"802870f9-d6d6-493e-a2b0-69d7067dadbe\") " Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.082431 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-inventory\") pod \"802870f9-d6d6-493e-a2b0-69d7067dadbe\" (UID: \"802870f9-d6d6-493e-a2b0-69d7067dadbe\") " Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.082489 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-ssh-key\") pod \"802870f9-d6d6-493e-a2b0-69d7067dadbe\" (UID: \"802870f9-d6d6-493e-a2b0-69d7067dadbe\") " Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.082565 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-ceph\") pod \"802870f9-d6d6-493e-a2b0-69d7067dadbe\" (UID: \"802870f9-d6d6-493e-a2b0-69d7067dadbe\") " Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.088244 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-ceph" (OuterVolumeSpecName: "ceph") pod "802870f9-d6d6-493e-a2b0-69d7067dadbe" (UID: "802870f9-d6d6-493e-a2b0-69d7067dadbe"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.088517 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/802870f9-d6d6-493e-a2b0-69d7067dadbe-kube-api-access-ktsbv" (OuterVolumeSpecName: "kube-api-access-ktsbv") pod "802870f9-d6d6-493e-a2b0-69d7067dadbe" (UID: "802870f9-d6d6-493e-a2b0-69d7067dadbe"). InnerVolumeSpecName "kube-api-access-ktsbv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.105319 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "802870f9-d6d6-493e-a2b0-69d7067dadbe" (UID: "802870f9-d6d6-493e-a2b0-69d7067dadbe"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.106337 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-inventory" (OuterVolumeSpecName: "inventory") pod "802870f9-d6d6-493e-a2b0-69d7067dadbe" (UID: "802870f9-d6d6-493e-a2b0-69d7067dadbe"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.186480 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktsbv\" (UniqueName: \"kubernetes.io/projected/802870f9-d6d6-493e-a2b0-69d7067dadbe-kube-api-access-ktsbv\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.186517 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.186530 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.186544 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/802870f9-d6d6-493e-a2b0-69d7067dadbe-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.666585 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" event={"ID":"802870f9-d6d6-493e-a2b0-69d7067dadbe","Type":"ContainerDied","Data":"94fa0851dced1b597ddb686abb2c3289334b373b856bf8a43df2cb859fcee09f"} Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.666626 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94fa0851dced1b597ddb686abb2c3289334b373b856bf8a43df2cb859fcee09f" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.666683 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-z2w8l" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.719501 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc"] Dec 13 07:21:10 crc kubenswrapper[4644]: E1213 07:21:10.719856 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8e25c17-c7ad-432c-8f80-b48bdd02b51e" containerName="registry-server" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.719874 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8e25c17-c7ad-432c-8f80-b48bdd02b51e" containerName="registry-server" Dec 13 07:21:10 crc kubenswrapper[4644]: E1213 07:21:10.719895 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c85999a2-248f-4c53-be17-bcee8f22b784" containerName="extract-utilities" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.719902 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="c85999a2-248f-4c53-be17-bcee8f22b784" containerName="extract-utilities" Dec 13 07:21:10 crc kubenswrapper[4644]: E1213 07:21:10.719917 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8e25c17-c7ad-432c-8f80-b48bdd02b51e" containerName="extract-content" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.719923 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8e25c17-c7ad-432c-8f80-b48bdd02b51e" containerName="extract-content" Dec 13 07:21:10 crc kubenswrapper[4644]: E1213 07:21:10.719937 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c85999a2-248f-4c53-be17-bcee8f22b784" containerName="registry-server" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.719942 4644 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="c85999a2-248f-4c53-be17-bcee8f22b784" containerName="registry-server" Dec 13 07:21:10 crc kubenswrapper[4644]: E1213 07:21:10.719951 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="244f25c5-3fe1-4b4f-a0f3-5fe163f20035" containerName="registry-server" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.719956 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="244f25c5-3fe1-4b4f-a0f3-5fe163f20035" containerName="registry-server" Dec 13 07:21:10 crc kubenswrapper[4644]: E1213 07:21:10.719969 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="802870f9-d6d6-493e-a2b0-69d7067dadbe" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.719975 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="802870f9-d6d6-493e-a2b0-69d7067dadbe" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:21:10 crc kubenswrapper[4644]: E1213 07:21:10.719983 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="244f25c5-3fe1-4b4f-a0f3-5fe163f20035" containerName="extract-utilities" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.719988 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="244f25c5-3fe1-4b4f-a0f3-5fe163f20035" containerName="extract-utilities" Dec 13 07:21:10 crc kubenswrapper[4644]: E1213 07:21:10.719996 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c85999a2-248f-4c53-be17-bcee8f22b784" containerName="extract-content" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.720002 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="c85999a2-248f-4c53-be17-bcee8f22b784" containerName="extract-content" Dec 13 07:21:10 crc kubenswrapper[4644]: E1213 07:21:10.720010 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="244f25c5-3fe1-4b4f-a0f3-5fe163f20035" containerName="extract-content" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.720015 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="244f25c5-3fe1-4b4f-a0f3-5fe163f20035" containerName="extract-content" Dec 13 07:21:10 crc kubenswrapper[4644]: E1213 07:21:10.720024 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8e25c17-c7ad-432c-8f80-b48bdd02b51e" containerName="extract-utilities" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.720029 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8e25c17-c7ad-432c-8f80-b48bdd02b51e" containerName="extract-utilities" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.720183 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="244f25c5-3fe1-4b4f-a0f3-5fe163f20035" containerName="registry-server" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.720211 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="802870f9-d6d6-493e-a2b0-69d7067dadbe" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.720222 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="c85999a2-248f-4c53-be17-bcee8f22b784" containerName="registry-server" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.720230 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8e25c17-c7ad-432c-8f80-b48bdd02b51e" containerName="registry-server" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.720760 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.723423 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.723563 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.723661 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.723771 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.724512 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.730975 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc"] Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.797493 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc\" (UID: \"bf00b61e-57cf-4cc2-8fd7-44b661469364\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.797748 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc\" (UID: \"bf00b61e-57cf-4cc2-8fd7-44b661469364\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.797808 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4mtt\" (UniqueName: \"kubernetes.io/projected/bf00b61e-57cf-4cc2-8fd7-44b661469364-kube-api-access-g4mtt\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc\" (UID: \"bf00b61e-57cf-4cc2-8fd7-44b661469364\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.797844 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc\" (UID: \"bf00b61e-57cf-4cc2-8fd7-44b661469364\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.899557 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4mtt\" (UniqueName: \"kubernetes.io/projected/bf00b61e-57cf-4cc2-8fd7-44b661469364-kube-api-access-g4mtt\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc\" (UID: \"bf00b61e-57cf-4cc2-8fd7-44b661469364\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.899603 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" 
(UniqueName: \"kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc\" (UID: \"bf00b61e-57cf-4cc2-8fd7-44b661469364\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.899759 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc\" (UID: \"bf00b61e-57cf-4cc2-8fd7-44b661469364\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.900013 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc\" (UID: \"bf00b61e-57cf-4cc2-8fd7-44b661469364\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.904488 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc\" (UID: \"bf00b61e-57cf-4cc2-8fd7-44b661469364\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.904505 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc\" (UID: \"bf00b61e-57cf-4cc2-8fd7-44b661469364\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.904850 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc\" (UID: \"bf00b61e-57cf-4cc2-8fd7-44b661469364\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" Dec 13 07:21:10 crc kubenswrapper[4644]: I1213 07:21:10.915508 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4mtt\" (UniqueName: \"kubernetes.io/projected/bf00b61e-57cf-4cc2-8fd7-44b661469364-kube-api-access-g4mtt\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc\" (UID: \"bf00b61e-57cf-4cc2-8fd7-44b661469364\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" Dec 13 07:21:11 crc kubenswrapper[4644]: I1213 07:21:11.033525 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" Dec 13 07:21:11 crc kubenswrapper[4644]: I1213 07:21:11.701897 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc"] Dec 13 07:21:12 crc kubenswrapper[4644]: I1213 07:21:12.697221 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" event={"ID":"bf00b61e-57cf-4cc2-8fd7-44b661469364","Type":"ContainerStarted","Data":"8b42640d7315ebeb138ccfd4a8ea6e80407c9e34812132ee0e356b73092380e7"} Dec 13 07:21:12 crc kubenswrapper[4644]: I1213 07:21:12.697280 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" event={"ID":"bf00b61e-57cf-4cc2-8fd7-44b661469364","Type":"ContainerStarted","Data":"065c07f6592c618047e211405876597bf3b40a93d8ad2e74d6659455a30d4eb3"} Dec 13 07:21:12 crc kubenswrapper[4644]: I1213 07:21:12.716227 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" podStartSLOduration=2.111272996 podStartE2EDuration="2.716189171s" podCreationTimestamp="2025-12-13 07:21:10 +0000 UTC" firstStartedPulling="2025-12-13 07:21:11.685004245 +0000 UTC m=+2133.899955078" lastFinishedPulling="2025-12-13 07:21:12.289920421 +0000 UTC m=+2134.504871253" observedRunningTime="2025-12-13 07:21:12.714735628 +0000 UTC m=+2134.929686461" watchObservedRunningTime="2025-12-13 07:21:12.716189171 +0000 UTC m=+2134.931140004" Dec 13 07:21:19 crc kubenswrapper[4644]: I1213 07:21:19.748502 4644 generic.go:334] "Generic (PLEG): container finished" podID="bf00b61e-57cf-4cc2-8fd7-44b661469364" containerID="8b42640d7315ebeb138ccfd4a8ea6e80407c9e34812132ee0e356b73092380e7" exitCode=0 Dec 13 07:21:19 crc kubenswrapper[4644]: I1213 07:21:19.748586 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" event={"ID":"bf00b61e-57cf-4cc2-8fd7-44b661469364","Type":"ContainerDied","Data":"8b42640d7315ebeb138ccfd4a8ea6e80407c9e34812132ee0e356b73092380e7"} Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.112861 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.240320 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-inventory\") pod \"bf00b61e-57cf-4cc2-8fd7-44b661469364\" (UID: \"bf00b61e-57cf-4cc2-8fd7-44b661469364\") " Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.240424 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4mtt\" (UniqueName: \"kubernetes.io/projected/bf00b61e-57cf-4cc2-8fd7-44b661469364-kube-api-access-g4mtt\") pod \"bf00b61e-57cf-4cc2-8fd7-44b661469364\" (UID: \"bf00b61e-57cf-4cc2-8fd7-44b661469364\") " Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.240619 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-ceph\") pod \"bf00b61e-57cf-4cc2-8fd7-44b661469364\" (UID: \"bf00b61e-57cf-4cc2-8fd7-44b661469364\") " Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.240748 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-ssh-key\") pod \"bf00b61e-57cf-4cc2-8fd7-44b661469364\" (UID: \"bf00b61e-57cf-4cc2-8fd7-44b661469364\") " Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.248962 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-ceph" (OuterVolumeSpecName: "ceph") pod "bf00b61e-57cf-4cc2-8fd7-44b661469364" (UID: "bf00b61e-57cf-4cc2-8fd7-44b661469364"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.260324 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf00b61e-57cf-4cc2-8fd7-44b661469364-kube-api-access-g4mtt" (OuterVolumeSpecName: "kube-api-access-g4mtt") pod "bf00b61e-57cf-4cc2-8fd7-44b661469364" (UID: "bf00b61e-57cf-4cc2-8fd7-44b661469364"). InnerVolumeSpecName "kube-api-access-g4mtt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.265371 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-inventory" (OuterVolumeSpecName: "inventory") pod "bf00b61e-57cf-4cc2-8fd7-44b661469364" (UID: "bf00b61e-57cf-4cc2-8fd7-44b661469364"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.267548 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bf00b61e-57cf-4cc2-8fd7-44b661469364" (UID: "bf00b61e-57cf-4cc2-8fd7-44b661469364"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.344819 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.344883 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.344897 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4mtt\" (UniqueName: \"kubernetes.io/projected/bf00b61e-57cf-4cc2-8fd7-44b661469364-kube-api-access-g4mtt\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.344909 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf00b61e-57cf-4cc2-8fd7-44b661469364-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.766143 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" event={"ID":"bf00b61e-57cf-4cc2-8fd7-44b661469364","Type":"ContainerDied","Data":"065c07f6592c618047e211405876597bf3b40a93d8ad2e74d6659455a30d4eb3"} Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.766495 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="065c07f6592c618047e211405876597bf3b40a93d8ad2e74d6659455a30d4eb3" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.766225 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.870172 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54"] Dec 13 07:21:21 crc kubenswrapper[4644]: E1213 07:21:21.870535 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf00b61e-57cf-4cc2-8fd7-44b661469364" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.870554 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf00b61e-57cf-4cc2-8fd7-44b661469364" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.870729 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf00b61e-57cf-4cc2-8fd7-44b661469364" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.871402 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.876355 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.876423 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.876848 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.876931 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.876871 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.877135 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.877228 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.877326 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.881275 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54"] Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.959115 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.959186 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.959229 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.959278 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.959301 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4l9lh\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-kube-api-access-4l9lh\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.959347 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.959411 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.959436 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.959479 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.959547 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.959580 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 
07:21:21.959684 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:21 crc kubenswrapper[4644]: I1213 07:21:21.959706 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.062322 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.062388 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.062433 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.062539 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.062582 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.062715 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.062748 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.062826 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.062895 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.062934 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.062984 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.063010 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4l9lh\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-kube-api-access-4l9lh\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.063068 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.068922 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.069021 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.071003 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.071396 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.071969 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.072102 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.072140 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.072220 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.072597 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.073033 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.073624 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.073807 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.083368 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4l9lh\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-kube-api-access-4l9lh\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-f8n54\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.190506 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.654550 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54"] Dec 13 07:21:22 crc kubenswrapper[4644]: I1213 07:21:22.775729 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" event={"ID":"251f4f31-9a39-4fd0-a492-09a3ffa3ce11","Type":"ContainerStarted","Data":"d5c34e87d52dd257acea34baeb95fab092260b5c3c8a5dded7d500fe41130750"} Dec 13 07:21:23 crc kubenswrapper[4644]: I1213 07:21:23.783410 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" event={"ID":"251f4f31-9a39-4fd0-a492-09a3ffa3ce11","Type":"ContainerStarted","Data":"1402dcc2e393ac0ffeb4cd6502233ef3c5c37253e81fafff4000f1971916dd34"} Dec 13 07:21:23 crc kubenswrapper[4644]: I1213 07:21:23.803059 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" podStartSLOduration=2.220529113 podStartE2EDuration="2.803040276s" podCreationTimestamp="2025-12-13 07:21:21 +0000 UTC" firstStartedPulling="2025-12-13 07:21:22.660311859 +0000 UTC m=+2144.875262692" lastFinishedPulling="2025-12-13 07:21:23.242823021 +0000 UTC m=+2145.457773855" observedRunningTime="2025-12-13 07:21:23.797355087 +0000 UTC m=+2146.012305921" watchObservedRunningTime="2025-12-13 07:21:23.803040276 +0000 UTC m=+2146.017991110" Dec 13 07:21:46 crc kubenswrapper[4644]: I1213 07:21:46.992815 4644 generic.go:334] "Generic (PLEG): container finished" podID="251f4f31-9a39-4fd0-a492-09a3ffa3ce11" containerID="1402dcc2e393ac0ffeb4cd6502233ef3c5c37253e81fafff4000f1971916dd34" exitCode=0 Dec 13 07:21:46 crc kubenswrapper[4644]: I1213 07:21:46.992920 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" event={"ID":"251f4f31-9a39-4fd0-a492-09a3ffa3ce11","Type":"ContainerDied","Data":"1402dcc2e393ac0ffeb4cd6502233ef3c5c37253e81fafff4000f1971916dd34"} Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.340321 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.479791 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-libvirt-combined-ca-bundle\") pod \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.479874 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ssh-key\") pod \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.480654 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ceph\") pod \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.480718 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ovn-combined-ca-bundle\") pod \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.480759 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4l9lh\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-kube-api-access-4l9lh\") pod \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.480796 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-ovn-default-certs-0\") pod \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.480832 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-neutron-metadata-combined-ca-bundle\") pod \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.480873 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-inventory\") pod \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.480945 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.480976 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-nova-combined-ca-bundle\") pod \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.480999 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.481021 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-bootstrap-combined-ca-bundle\") pod \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.481067 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-repo-setup-combined-ca-bundle\") pod \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\" (UID: \"251f4f31-9a39-4fd0-a492-09a3ffa3ce11\") " Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.487525 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "251f4f31-9a39-4fd0-a492-09a3ffa3ce11" (UID: "251f4f31-9a39-4fd0-a492-09a3ffa3ce11"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.487798 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-kube-api-access-4l9lh" (OuterVolumeSpecName: "kube-api-access-4l9lh") pod "251f4f31-9a39-4fd0-a492-09a3ffa3ce11" (UID: "251f4f31-9a39-4fd0-a492-09a3ffa3ce11"). InnerVolumeSpecName "kube-api-access-4l9lh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.489393 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "251f4f31-9a39-4fd0-a492-09a3ffa3ce11" (UID: "251f4f31-9a39-4fd0-a492-09a3ffa3ce11"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.490101 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "251f4f31-9a39-4fd0-a492-09a3ffa3ce11" (UID: "251f4f31-9a39-4fd0-a492-09a3ffa3ce11"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.490163 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ceph" (OuterVolumeSpecName: "ceph") pod "251f4f31-9a39-4fd0-a492-09a3ffa3ce11" (UID: "251f4f31-9a39-4fd0-a492-09a3ffa3ce11"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.490184 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "251f4f31-9a39-4fd0-a492-09a3ffa3ce11" (UID: "251f4f31-9a39-4fd0-a492-09a3ffa3ce11"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.490205 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "251f4f31-9a39-4fd0-a492-09a3ffa3ce11" (UID: "251f4f31-9a39-4fd0-a492-09a3ffa3ce11"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.490256 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "251f4f31-9a39-4fd0-a492-09a3ffa3ce11" (UID: "251f4f31-9a39-4fd0-a492-09a3ffa3ce11"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.490735 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "251f4f31-9a39-4fd0-a492-09a3ffa3ce11" (UID: "251f4f31-9a39-4fd0-a492-09a3ffa3ce11"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.492382 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "251f4f31-9a39-4fd0-a492-09a3ffa3ce11" (UID: "251f4f31-9a39-4fd0-a492-09a3ffa3ce11"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.492865 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "251f4f31-9a39-4fd0-a492-09a3ffa3ce11" (UID: "251f4f31-9a39-4fd0-a492-09a3ffa3ce11"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.509995 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "251f4f31-9a39-4fd0-a492-09a3ffa3ce11" (UID: "251f4f31-9a39-4fd0-a492-09a3ffa3ce11"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.514725 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-inventory" (OuterVolumeSpecName: "inventory") pod "251f4f31-9a39-4fd0-a492-09a3ffa3ce11" (UID: "251f4f31-9a39-4fd0-a492-09a3ffa3ce11"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.585962 4644 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.586004 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4l9lh\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-kube-api-access-4l9lh\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.586018 4644 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.586036 4644 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.586048 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.586060 4644 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.586075 4644 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.586087 4644 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.586102 4644 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-bootstrap-combined-ca-bundle\") on node 
\"crc\" DevicePath \"\"" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.586111 4644 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.586120 4644 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.586130 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:48 crc kubenswrapper[4644]: I1213 07:21:48.586149 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/251f4f31-9a39-4fd0-a492-09a3ffa3ce11-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.008552 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" event={"ID":"251f4f31-9a39-4fd0-a492-09a3ffa3ce11","Type":"ContainerDied","Data":"d5c34e87d52dd257acea34baeb95fab092260b5c3c8a5dded7d500fe41130750"} Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.008598 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5c34e87d52dd257acea34baeb95fab092260b5c3c8a5dded7d500fe41130750" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.008610 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-f8n54" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.084818 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2"] Dec 13 07:21:49 crc kubenswrapper[4644]: E1213 07:21:49.085167 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="251f4f31-9a39-4fd0-a492-09a3ffa3ce11" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.085187 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="251f4f31-9a39-4fd0-a492-09a3ffa3ce11" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.085374 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="251f4f31-9a39-4fd0-a492-09a3ffa3ce11" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.085945 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.087567 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.087884 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.088111 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.088801 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.088991 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.095165 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2"] Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.198944 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2\" (UID: \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.199519 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2\" (UID: \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.199573 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmsnz\" (UniqueName: \"kubernetes.io/projected/ce2ca357-6142-458b-8b5c-0f722a3a7d86-kube-api-access-hmsnz\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2\" (UID: \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.199759 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2\" (UID: \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.302055 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2\" (UID: \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.302105 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmsnz\" 
(UniqueName: \"kubernetes.io/projected/ce2ca357-6142-458b-8b5c-0f722a3a7d86-kube-api-access-hmsnz\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2\" (UID: \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.302174 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2\" (UID: \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.302285 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2\" (UID: \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.306770 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2\" (UID: \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.306861 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2\" (UID: \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.307334 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2\" (UID: \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.318380 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmsnz\" (UniqueName: \"kubernetes.io/projected/ce2ca357-6142-458b-8b5c-0f722a3a7d86-kube-api-access-hmsnz\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2\" (UID: \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.408644 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" Dec 13 07:21:49 crc kubenswrapper[4644]: I1213 07:21:49.898001 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2"] Dec 13 07:21:50 crc kubenswrapper[4644]: I1213 07:21:50.021842 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" event={"ID":"ce2ca357-6142-458b-8b5c-0f722a3a7d86","Type":"ContainerStarted","Data":"d4443475b6817dfd45d2dc4051fb0179ab2b16a43d0b2e007f0d095d813c11fe"} Dec 13 07:21:51 crc kubenswrapper[4644]: I1213 07:21:51.030993 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" event={"ID":"ce2ca357-6142-458b-8b5c-0f722a3a7d86","Type":"ContainerStarted","Data":"07da774c07a69140b615271743a83341343a427361d5c5fe0ede718386ed3494"} Dec 13 07:21:51 crc kubenswrapper[4644]: I1213 07:21:51.060108 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" podStartSLOduration=1.392075052 podStartE2EDuration="2.06008374s" podCreationTimestamp="2025-12-13 07:21:49 +0000 UTC" firstStartedPulling="2025-12-13 07:21:49.903341813 +0000 UTC m=+2172.118292646" lastFinishedPulling="2025-12-13 07:21:50.5713505 +0000 UTC m=+2172.786301334" observedRunningTime="2025-12-13 07:21:51.049313039 +0000 UTC m=+2173.264263872" watchObservedRunningTime="2025-12-13 07:21:51.06008374 +0000 UTC m=+2173.275034574" Dec 13 07:21:55 crc kubenswrapper[4644]: I1213 07:21:55.082642 4644 generic.go:334] "Generic (PLEG): container finished" podID="ce2ca357-6142-458b-8b5c-0f722a3a7d86" containerID="07da774c07a69140b615271743a83341343a427361d5c5fe0ede718386ed3494" exitCode=0 Dec 13 07:21:55 crc kubenswrapper[4644]: I1213 07:21:55.083165 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" event={"ID":"ce2ca357-6142-458b-8b5c-0f722a3a7d86","Type":"ContainerDied","Data":"07da774c07a69140b615271743a83341343a427361d5c5fe0ede718386ed3494"} Dec 13 07:21:56 crc kubenswrapper[4644]: I1213 07:21:56.504737 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" Dec 13 07:21:56 crc kubenswrapper[4644]: I1213 07:21:56.568638 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmsnz\" (UniqueName: \"kubernetes.io/projected/ce2ca357-6142-458b-8b5c-0f722a3a7d86-kube-api-access-hmsnz\") pod \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\" (UID: \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\") " Dec 13 07:21:56 crc kubenswrapper[4644]: I1213 07:21:56.568830 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-ssh-key\") pod \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\" (UID: \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\") " Dec 13 07:21:56 crc kubenswrapper[4644]: I1213 07:21:56.568938 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-inventory\") pod \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\" (UID: \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\") " Dec 13 07:21:56 crc kubenswrapper[4644]: I1213 07:21:56.570029 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-ceph\") pod \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\" (UID: \"ce2ca357-6142-458b-8b5c-0f722a3a7d86\") " Dec 13 07:21:56 crc kubenswrapper[4644]: I1213 07:21:56.577261 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-ceph" (OuterVolumeSpecName: "ceph") pod "ce2ca357-6142-458b-8b5c-0f722a3a7d86" (UID: "ce2ca357-6142-458b-8b5c-0f722a3a7d86"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:56 crc kubenswrapper[4644]: I1213 07:21:56.577630 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce2ca357-6142-458b-8b5c-0f722a3a7d86-kube-api-access-hmsnz" (OuterVolumeSpecName: "kube-api-access-hmsnz") pod "ce2ca357-6142-458b-8b5c-0f722a3a7d86" (UID: "ce2ca357-6142-458b-8b5c-0f722a3a7d86"). InnerVolumeSpecName "kube-api-access-hmsnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:21:56 crc kubenswrapper[4644]: I1213 07:21:56.597235 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ce2ca357-6142-458b-8b5c-0f722a3a7d86" (UID: "ce2ca357-6142-458b-8b5c-0f722a3a7d86"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:56 crc kubenswrapper[4644]: I1213 07:21:56.597376 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-inventory" (OuterVolumeSpecName: "inventory") pod "ce2ca357-6142-458b-8b5c-0f722a3a7d86" (UID: "ce2ca357-6142-458b-8b5c-0f722a3a7d86"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:21:56 crc kubenswrapper[4644]: I1213 07:21:56.674358 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:56 crc kubenswrapper[4644]: I1213 07:21:56.674457 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:56 crc kubenswrapper[4644]: I1213 07:21:56.674474 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ce2ca357-6142-458b-8b5c-0f722a3a7d86-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:56 crc kubenswrapper[4644]: I1213 07:21:56.674491 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmsnz\" (UniqueName: \"kubernetes.io/projected/ce2ca357-6142-458b-8b5c-0f722a3a7d86-kube-api-access-hmsnz\") on node \"crc\" DevicePath \"\"" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.105291 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" event={"ID":"ce2ca357-6142-458b-8b5c-0f722a3a7d86","Type":"ContainerDied","Data":"d4443475b6817dfd45d2dc4051fb0179ab2b16a43d0b2e007f0d095d813c11fe"} Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.105374 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d4443475b6817dfd45d2dc4051fb0179ab2b16a43d0b2e007f0d095d813c11fe" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.105331 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.179875 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh"] Dec 13 07:21:57 crc kubenswrapper[4644]: E1213 07:21:57.180559 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce2ca357-6142-458b-8b5c-0f722a3a7d86" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.180582 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce2ca357-6142-458b-8b5c-0f722a3a7d86" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.180811 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce2ca357-6142-458b-8b5c-0f722a3a7d86" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.181474 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.184482 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.184584 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.184706 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.185222 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.185668 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.187414 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.194128 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh"] Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.287722 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.287767 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.287911 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4vm7\" (UniqueName: \"kubernetes.io/projected/c8ae022b-1ddd-4d25-b060-b973b7925fb4-kube-api-access-v4vm7\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.287992 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.288193 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc 
kubenswrapper[4644]: I1213 07:21:57.288217 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.389544 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.389605 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.389676 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4vm7\" (UniqueName: \"kubernetes.io/projected/c8ae022b-1ddd-4d25-b060-b973b7925fb4-kube-api-access-v4vm7\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.389720 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.389791 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.389810 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.390593 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.397964 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.397997 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.397977 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.398723 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.405785 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4vm7\" (UniqueName: \"kubernetes.io/projected/c8ae022b-1ddd-4d25-b060-b973b7925fb4-kube-api-access-v4vm7\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jqzfh\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.496463 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:21:57 crc kubenswrapper[4644]: I1213 07:21:57.986371 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh"] Dec 13 07:21:58 crc kubenswrapper[4644]: I1213 07:21:58.115672 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" event={"ID":"c8ae022b-1ddd-4d25-b060-b973b7925fb4","Type":"ContainerStarted","Data":"9aa7c6b02b4980d3872df07f5833d111ce268b481e3fadd8b99eae93b52a29f0"} Dec 13 07:21:59 crc kubenswrapper[4644]: I1213 07:21:59.128924 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" event={"ID":"c8ae022b-1ddd-4d25-b060-b973b7925fb4","Type":"ContainerStarted","Data":"4c69b19c944be7ed0c48e7dcc4ef941172d445c4d3fe36ee5d97fe03ee7543fe"} Dec 13 07:21:59 crc kubenswrapper[4644]: I1213 07:21:59.154323 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" podStartSLOduration=1.584275374 podStartE2EDuration="2.154302663s" podCreationTimestamp="2025-12-13 07:21:57 +0000 UTC" firstStartedPulling="2025-12-13 07:21:57.991095281 +0000 UTC m=+2180.206046114" lastFinishedPulling="2025-12-13 07:21:58.561122571 +0000 UTC m=+2180.776073403" observedRunningTime="2025-12-13 07:21:59.152535982 +0000 UTC m=+2181.367486815" watchObservedRunningTime="2025-12-13 07:21:59.154302663 +0000 UTC m=+2181.369253497" Dec 13 07:22:39 crc kubenswrapper[4644]: I1213 07:22:39.753762 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:22:39 crc kubenswrapper[4644]: I1213 07:22:39.754313 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:22:52 crc kubenswrapper[4644]: I1213 07:22:52.596965 4644 generic.go:334] "Generic (PLEG): container finished" podID="c8ae022b-1ddd-4d25-b060-b973b7925fb4" containerID="4c69b19c944be7ed0c48e7dcc4ef941172d445c4d3fe36ee5d97fe03ee7543fe" exitCode=0 Dec 13 07:22:52 crc kubenswrapper[4644]: I1213 07:22:52.597053 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" event={"ID":"c8ae022b-1ddd-4d25-b060-b973b7925fb4","Type":"ContainerDied","Data":"4c69b19c944be7ed0c48e7dcc4ef941172d445c4d3fe36ee5d97fe03ee7543fe"} Dec 13 07:22:53 crc kubenswrapper[4644]: I1213 07:22:53.907614 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.007849 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ovn-combined-ca-bundle\") pod \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.007935 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-inventory\") pod \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.007994 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4vm7\" (UniqueName: \"kubernetes.io/projected/c8ae022b-1ddd-4d25-b060-b973b7925fb4-kube-api-access-v4vm7\") pod \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.008252 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ovncontroller-config-0\") pod \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.008323 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ceph\") pod \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.009080 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ssh-key\") pod \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\" (UID: \"c8ae022b-1ddd-4d25-b060-b973b7925fb4\") " Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.015543 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8ae022b-1ddd-4d25-b060-b973b7925fb4-kube-api-access-v4vm7" (OuterVolumeSpecName: "kube-api-access-v4vm7") pod "c8ae022b-1ddd-4d25-b060-b973b7925fb4" (UID: "c8ae022b-1ddd-4d25-b060-b973b7925fb4"). InnerVolumeSpecName "kube-api-access-v4vm7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.016196 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ceph" (OuterVolumeSpecName: "ceph") pod "c8ae022b-1ddd-4d25-b060-b973b7925fb4" (UID: "c8ae022b-1ddd-4d25-b060-b973b7925fb4"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.016319 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "c8ae022b-1ddd-4d25-b060-b973b7925fb4" (UID: "c8ae022b-1ddd-4d25-b060-b973b7925fb4"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.030948 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "c8ae022b-1ddd-4d25-b060-b973b7925fb4" (UID: "c8ae022b-1ddd-4d25-b060-b973b7925fb4"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.031748 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-inventory" (OuterVolumeSpecName: "inventory") pod "c8ae022b-1ddd-4d25-b060-b973b7925fb4" (UID: "c8ae022b-1ddd-4d25-b060-b973b7925fb4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.032799 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c8ae022b-1ddd-4d25-b060-b973b7925fb4" (UID: "c8ae022b-1ddd-4d25-b060-b973b7925fb4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.113099 4644 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.113131 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.113144 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.113154 4644 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.113163 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c8ae022b-1ddd-4d25-b060-b973b7925fb4-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.113173 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4vm7\" (UniqueName: \"kubernetes.io/projected/c8ae022b-1ddd-4d25-b060-b973b7925fb4-kube-api-access-v4vm7\") on node \"crc\" DevicePath \"\"" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.620372 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" event={"ID":"c8ae022b-1ddd-4d25-b060-b973b7925fb4","Type":"ContainerDied","Data":"9aa7c6b02b4980d3872df07f5833d111ce268b481e3fadd8b99eae93b52a29f0"} Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.620758 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9aa7c6b02b4980d3872df07f5833d111ce268b481e3fadd8b99eae93b52a29f0" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 
07:22:54.620466 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jqzfh" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.687777 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5"] Dec 13 07:22:54 crc kubenswrapper[4644]: E1213 07:22:54.688181 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8ae022b-1ddd-4d25-b060-b973b7925fb4" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.688205 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8ae022b-1ddd-4d25-b060-b973b7925fb4" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.688408 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8ae022b-1ddd-4d25-b060-b973b7925fb4" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.689077 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.692239 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.692530 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.692559 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.692559 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.692540 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.692992 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.693276 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.702269 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5"] Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.825069 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xghjd\" (UniqueName: \"kubernetes.io/projected/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-kube-api-access-xghjd\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.825161 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.825524 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.825601 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.825691 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.825776 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.825847 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.928149 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.928281 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.928353 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.928405 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xghjd\" (UniqueName: \"kubernetes.io/projected/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-kube-api-access-xghjd\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.928493 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.928549 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.928624 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.933864 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.934062 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.934123 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 
07:22:54.934520 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.934536 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.934667 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:54 crc kubenswrapper[4644]: I1213 07:22:54.942214 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xghjd\" (UniqueName: \"kubernetes.io/projected/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-kube-api-access-xghjd\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:55 crc kubenswrapper[4644]: I1213 07:22:55.004148 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:22:55 crc kubenswrapper[4644]: I1213 07:22:55.493361 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5"] Dec 13 07:22:55 crc kubenswrapper[4644]: I1213 07:22:55.628567 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" event={"ID":"08e5b710-aa24-40c9-9bd6-6d03543cc7b7","Type":"ContainerStarted","Data":"6d6dd56d7740324816fc16cfd1082879f7fa6858b4c7a7cccac2864c8a703930"} Dec 13 07:22:56 crc kubenswrapper[4644]: I1213 07:22:56.650291 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" event={"ID":"08e5b710-aa24-40c9-9bd6-6d03543cc7b7","Type":"ContainerStarted","Data":"50bed08f5536717cd5bdff4733c3f50e343aa8015de4a1fa87ddf028c36c2b28"} Dec 13 07:22:56 crc kubenswrapper[4644]: I1213 07:22:56.670406 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" podStartSLOduration=2.037321128 podStartE2EDuration="2.670387717s" podCreationTimestamp="2025-12-13 07:22:54 +0000 UTC" firstStartedPulling="2025-12-13 07:22:55.497704447 +0000 UTC m=+2237.712655280" lastFinishedPulling="2025-12-13 07:22:56.130771035 +0000 UTC m=+2238.345721869" observedRunningTime="2025-12-13 07:22:56.668790224 +0000 UTC m=+2238.883741057" watchObservedRunningTime="2025-12-13 07:22:56.670387717 +0000 UTC m=+2238.885338550" Dec 13 07:23:09 crc kubenswrapper[4644]: I1213 07:23:09.754504 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:23:09 crc kubenswrapper[4644]: I1213 07:23:09.754984 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:23:27 crc kubenswrapper[4644]: I1213 07:23:27.474729 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9l8nc"] Dec 13 07:23:27 crc kubenswrapper[4644]: I1213 07:23:27.477548 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9l8nc" Dec 13 07:23:27 crc kubenswrapper[4644]: I1213 07:23:27.484972 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9l8nc"] Dec 13 07:23:27 crc kubenswrapper[4644]: I1213 07:23:27.589160 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-utilities\") pod \"certified-operators-9l8nc\" (UID: \"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d\") " pod="openshift-marketplace/certified-operators-9l8nc" Dec 13 07:23:27 crc kubenswrapper[4644]: I1213 07:23:27.589262 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-995cr\" (UniqueName: \"kubernetes.io/projected/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-kube-api-access-995cr\") pod \"certified-operators-9l8nc\" (UID: \"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d\") " pod="openshift-marketplace/certified-operators-9l8nc" Dec 13 07:23:27 crc kubenswrapper[4644]: I1213 07:23:27.589324 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-catalog-content\") pod \"certified-operators-9l8nc\" (UID: \"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d\") " pod="openshift-marketplace/certified-operators-9l8nc" Dec 13 07:23:27 crc kubenswrapper[4644]: I1213 07:23:27.691531 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-utilities\") pod \"certified-operators-9l8nc\" (UID: \"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d\") " pod="openshift-marketplace/certified-operators-9l8nc" Dec 13 07:23:27 crc kubenswrapper[4644]: I1213 07:23:27.691633 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-995cr\" (UniqueName: \"kubernetes.io/projected/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-kube-api-access-995cr\") pod \"certified-operators-9l8nc\" (UID: \"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d\") " pod="openshift-marketplace/certified-operators-9l8nc" Dec 13 07:23:27 crc kubenswrapper[4644]: I1213 07:23:27.691674 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-catalog-content\") pod \"certified-operators-9l8nc\" (UID: \"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d\") " pod="openshift-marketplace/certified-operators-9l8nc" Dec 13 07:23:27 crc kubenswrapper[4644]: I1213 07:23:27.692171 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-utilities\") pod \"certified-operators-9l8nc\" (UID: \"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d\") " pod="openshift-marketplace/certified-operators-9l8nc" Dec 13 07:23:27 crc kubenswrapper[4644]: I1213 07:23:27.692227 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-catalog-content\") pod \"certified-operators-9l8nc\" (UID: \"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d\") " pod="openshift-marketplace/certified-operators-9l8nc" Dec 13 07:23:27 crc kubenswrapper[4644]: I1213 07:23:27.709658 4644 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-995cr\" (UniqueName: \"kubernetes.io/projected/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-kube-api-access-995cr\") pod \"certified-operators-9l8nc\" (UID: \"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d\") " pod="openshift-marketplace/certified-operators-9l8nc" Dec 13 07:23:27 crc kubenswrapper[4644]: I1213 07:23:27.795955 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9l8nc" Dec 13 07:23:28 crc kubenswrapper[4644]: I1213 07:23:28.235648 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9l8nc"] Dec 13 07:23:28 crc kubenswrapper[4644]: W1213 07:23:28.239613 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9bbf454a_f8d0_4af6_b490_a5cde0c61e5d.slice/crio-a26fb24478cd29f23d0e46bd3cd6cbc268d7abad55d2dee6ae3746f854c75234 WatchSource:0}: Error finding container a26fb24478cd29f23d0e46bd3cd6cbc268d7abad55d2dee6ae3746f854c75234: Status 404 returned error can't find the container with id a26fb24478cd29f23d0e46bd3cd6cbc268d7abad55d2dee6ae3746f854c75234 Dec 13 07:23:28 crc kubenswrapper[4644]: I1213 07:23:28.922804 4644 generic.go:334] "Generic (PLEG): container finished" podID="9bbf454a-f8d0-4af6-b490-a5cde0c61e5d" containerID="21b16ef5e125b6e96b2e411da4a63aa394f2e1b15d0f68c1c3775398170c9a31" exitCode=0 Dec 13 07:23:28 crc kubenswrapper[4644]: I1213 07:23:28.922897 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9l8nc" event={"ID":"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d","Type":"ContainerDied","Data":"21b16ef5e125b6e96b2e411da4a63aa394f2e1b15d0f68c1c3775398170c9a31"} Dec 13 07:23:28 crc kubenswrapper[4644]: I1213 07:23:28.923124 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9l8nc" event={"ID":"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d","Type":"ContainerStarted","Data":"a26fb24478cd29f23d0e46bd3cd6cbc268d7abad55d2dee6ae3746f854c75234"} Dec 13 07:23:30 crc kubenswrapper[4644]: I1213 07:23:30.941218 4644 generic.go:334] "Generic (PLEG): container finished" podID="9bbf454a-f8d0-4af6-b490-a5cde0c61e5d" containerID="ee9430944247638b40c6aa2397087559aa4c9960a8711ff73aa6867af14d2d49" exitCode=0 Dec 13 07:23:30 crc kubenswrapper[4644]: I1213 07:23:30.941326 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9l8nc" event={"ID":"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d","Type":"ContainerDied","Data":"ee9430944247638b40c6aa2397087559aa4c9960a8711ff73aa6867af14d2d49"} Dec 13 07:23:31 crc kubenswrapper[4644]: I1213 07:23:31.953715 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9l8nc" event={"ID":"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d","Type":"ContainerStarted","Data":"bf72f528362b53c770b9c4ec1752868f156a1be4f9506e87581406b334186660"} Dec 13 07:23:31 crc kubenswrapper[4644]: I1213 07:23:31.973982 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9l8nc" podStartSLOduration=2.421399518 podStartE2EDuration="4.973966562s" podCreationTimestamp="2025-12-13 07:23:27 +0000 UTC" firstStartedPulling="2025-12-13 07:23:28.924723414 +0000 UTC m=+2271.139674247" lastFinishedPulling="2025-12-13 07:23:31.477290468 +0000 UTC m=+2273.692241291" observedRunningTime="2025-12-13 07:23:31.967427476 +0000 UTC 
m=+2274.182378310" watchObservedRunningTime="2025-12-13 07:23:31.973966562 +0000 UTC m=+2274.188917395" Dec 13 07:23:37 crc kubenswrapper[4644]: I1213 07:23:37.797097 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9l8nc" Dec 13 07:23:37 crc kubenswrapper[4644]: I1213 07:23:37.797802 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9l8nc" Dec 13 07:23:37 crc kubenswrapper[4644]: I1213 07:23:37.835319 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9l8nc" Dec 13 07:23:38 crc kubenswrapper[4644]: I1213 07:23:38.082970 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9l8nc" Dec 13 07:23:38 crc kubenswrapper[4644]: I1213 07:23:38.127635 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9l8nc"] Dec 13 07:23:39 crc kubenswrapper[4644]: I1213 07:23:39.754112 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:23:39 crc kubenswrapper[4644]: I1213 07:23:39.754491 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:23:39 crc kubenswrapper[4644]: I1213 07:23:39.754553 4644 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 07:23:39 crc kubenswrapper[4644]: I1213 07:23:39.755636 4644 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec"} pod="openshift-machine-config-operator/machine-config-daemon-45tj4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 07:23:39 crc kubenswrapper[4644]: I1213 07:23:39.755704 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" containerID="cri-o://385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" gracePeriod=600 Dec 13 07:23:39 crc kubenswrapper[4644]: E1213 07:23:39.879347 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:23:40 crc kubenswrapper[4644]: I1213 07:23:40.017104 4644 generic.go:334] "Generic (PLEG): container finished" podID="48240f19-087e-4597-b448-ab1a190a5027" 
containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" exitCode=0 Dec 13 07:23:40 crc kubenswrapper[4644]: I1213 07:23:40.017171 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerDied","Data":"385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec"} Dec 13 07:23:40 crc kubenswrapper[4644]: I1213 07:23:40.017236 4644 scope.go:117] "RemoveContainer" containerID="6717e4b811a9dd73f331b0daf8793ce527b10ce24a5fb2d5fd04626c666ac9d3" Dec 13 07:23:40 crc kubenswrapper[4644]: I1213 07:23:40.017787 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9l8nc" podUID="9bbf454a-f8d0-4af6-b490-a5cde0c61e5d" containerName="registry-server" containerID="cri-o://bf72f528362b53c770b9c4ec1752868f156a1be4f9506e87581406b334186660" gracePeriod=2 Dec 13 07:23:40 crc kubenswrapper[4644]: I1213 07:23:40.018026 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:23:40 crc kubenswrapper[4644]: E1213 07:23:40.018436 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:23:40 crc kubenswrapper[4644]: I1213 07:23:40.381529 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9l8nc" Dec 13 07:23:40 crc kubenswrapper[4644]: I1213 07:23:40.446532 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-utilities\") pod \"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d\" (UID: \"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d\") " Dec 13 07:23:40 crc kubenswrapper[4644]: I1213 07:23:40.446646 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-catalog-content\") pod \"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d\" (UID: \"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d\") " Dec 13 07:23:40 crc kubenswrapper[4644]: I1213 07:23:40.446728 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-995cr\" (UniqueName: \"kubernetes.io/projected/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-kube-api-access-995cr\") pod \"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d\" (UID: \"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d\") " Dec 13 07:23:40 crc kubenswrapper[4644]: I1213 07:23:40.447454 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-utilities" (OuterVolumeSpecName: "utilities") pod "9bbf454a-f8d0-4af6-b490-a5cde0c61e5d" (UID: "9bbf454a-f8d0-4af6-b490-a5cde0c61e5d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:23:40 crc kubenswrapper[4644]: I1213 07:23:40.447617 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:23:40 crc kubenswrapper[4644]: I1213 07:23:40.453280 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-kube-api-access-995cr" (OuterVolumeSpecName: "kube-api-access-995cr") pod "9bbf454a-f8d0-4af6-b490-a5cde0c61e5d" (UID: "9bbf454a-f8d0-4af6-b490-a5cde0c61e5d"). InnerVolumeSpecName "kube-api-access-995cr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:23:40 crc kubenswrapper[4644]: I1213 07:23:40.498005 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9bbf454a-f8d0-4af6-b490-a5cde0c61e5d" (UID: "9bbf454a-f8d0-4af6-b490-a5cde0c61e5d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:23:40 crc kubenswrapper[4644]: I1213 07:23:40.549331 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:23:40 crc kubenswrapper[4644]: I1213 07:23:40.549369 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-995cr\" (UniqueName: \"kubernetes.io/projected/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d-kube-api-access-995cr\") on node \"crc\" DevicePath \"\"" Dec 13 07:23:41 crc kubenswrapper[4644]: I1213 07:23:41.031726 4644 generic.go:334] "Generic (PLEG): container finished" podID="9bbf454a-f8d0-4af6-b490-a5cde0c61e5d" containerID="bf72f528362b53c770b9c4ec1752868f156a1be4f9506e87581406b334186660" exitCode=0 Dec 13 07:23:41 crc kubenswrapper[4644]: I1213 07:23:41.031830 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9l8nc" event={"ID":"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d","Type":"ContainerDied","Data":"bf72f528362b53c770b9c4ec1752868f156a1be4f9506e87581406b334186660"} Dec 13 07:23:41 crc kubenswrapper[4644]: I1213 07:23:41.031882 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9l8nc" event={"ID":"9bbf454a-f8d0-4af6-b490-a5cde0c61e5d","Type":"ContainerDied","Data":"a26fb24478cd29f23d0e46bd3cd6cbc268d7abad55d2dee6ae3746f854c75234"} Dec 13 07:23:41 crc kubenswrapper[4644]: I1213 07:23:41.031907 4644 scope.go:117] "RemoveContainer" containerID="bf72f528362b53c770b9c4ec1752868f156a1be4f9506e87581406b334186660" Dec 13 07:23:41 crc kubenswrapper[4644]: I1213 07:23:41.033280 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9l8nc" Dec 13 07:23:41 crc kubenswrapper[4644]: I1213 07:23:41.038023 4644 generic.go:334] "Generic (PLEG): container finished" podID="08e5b710-aa24-40c9-9bd6-6d03543cc7b7" containerID="50bed08f5536717cd5bdff4733c3f50e343aa8015de4a1fa87ddf028c36c2b28" exitCode=0 Dec 13 07:23:41 crc kubenswrapper[4644]: I1213 07:23:41.038062 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" event={"ID":"08e5b710-aa24-40c9-9bd6-6d03543cc7b7","Type":"ContainerDied","Data":"50bed08f5536717cd5bdff4733c3f50e343aa8015de4a1fa87ddf028c36c2b28"} Dec 13 07:23:41 crc kubenswrapper[4644]: I1213 07:23:41.054702 4644 scope.go:117] "RemoveContainer" containerID="ee9430944247638b40c6aa2397087559aa4c9960a8711ff73aa6867af14d2d49" Dec 13 07:23:41 crc kubenswrapper[4644]: I1213 07:23:41.070715 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9l8nc"] Dec 13 07:23:41 crc kubenswrapper[4644]: I1213 07:23:41.077183 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9l8nc"] Dec 13 07:23:41 crc kubenswrapper[4644]: I1213 07:23:41.088055 4644 scope.go:117] "RemoveContainer" containerID="21b16ef5e125b6e96b2e411da4a63aa394f2e1b15d0f68c1c3775398170c9a31" Dec 13 07:23:41 crc kubenswrapper[4644]: I1213 07:23:41.111230 4644 scope.go:117] "RemoveContainer" containerID="bf72f528362b53c770b9c4ec1752868f156a1be4f9506e87581406b334186660" Dec 13 07:23:41 crc kubenswrapper[4644]: E1213 07:23:41.111807 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf72f528362b53c770b9c4ec1752868f156a1be4f9506e87581406b334186660\": container with ID starting with bf72f528362b53c770b9c4ec1752868f156a1be4f9506e87581406b334186660 not found: ID does not exist" containerID="bf72f528362b53c770b9c4ec1752868f156a1be4f9506e87581406b334186660" Dec 13 07:23:41 crc kubenswrapper[4644]: I1213 07:23:41.111863 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf72f528362b53c770b9c4ec1752868f156a1be4f9506e87581406b334186660"} err="failed to get container status \"bf72f528362b53c770b9c4ec1752868f156a1be4f9506e87581406b334186660\": rpc error: code = NotFound desc = could not find container \"bf72f528362b53c770b9c4ec1752868f156a1be4f9506e87581406b334186660\": container with ID starting with bf72f528362b53c770b9c4ec1752868f156a1be4f9506e87581406b334186660 not found: ID does not exist" Dec 13 07:23:41 crc kubenswrapper[4644]: I1213 07:23:41.111895 4644 scope.go:117] "RemoveContainer" containerID="ee9430944247638b40c6aa2397087559aa4c9960a8711ff73aa6867af14d2d49" Dec 13 07:23:41 crc kubenswrapper[4644]: E1213 07:23:41.112636 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee9430944247638b40c6aa2397087559aa4c9960a8711ff73aa6867af14d2d49\": container with ID starting with ee9430944247638b40c6aa2397087559aa4c9960a8711ff73aa6867af14d2d49 not found: ID does not exist" containerID="ee9430944247638b40c6aa2397087559aa4c9960a8711ff73aa6867af14d2d49" Dec 13 07:23:41 crc kubenswrapper[4644]: I1213 07:23:41.112670 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee9430944247638b40c6aa2397087559aa4c9960a8711ff73aa6867af14d2d49"} err="failed to get container status 
\"ee9430944247638b40c6aa2397087559aa4c9960a8711ff73aa6867af14d2d49\": rpc error: code = NotFound desc = could not find container \"ee9430944247638b40c6aa2397087559aa4c9960a8711ff73aa6867af14d2d49\": container with ID starting with ee9430944247638b40c6aa2397087559aa4c9960a8711ff73aa6867af14d2d49 not found: ID does not exist" Dec 13 07:23:41 crc kubenswrapper[4644]: I1213 07:23:41.112690 4644 scope.go:117] "RemoveContainer" containerID="21b16ef5e125b6e96b2e411da4a63aa394f2e1b15d0f68c1c3775398170c9a31" Dec 13 07:23:41 crc kubenswrapper[4644]: E1213 07:23:41.113403 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21b16ef5e125b6e96b2e411da4a63aa394f2e1b15d0f68c1c3775398170c9a31\": container with ID starting with 21b16ef5e125b6e96b2e411da4a63aa394f2e1b15d0f68c1c3775398170c9a31 not found: ID does not exist" containerID="21b16ef5e125b6e96b2e411da4a63aa394f2e1b15d0f68c1c3775398170c9a31" Dec 13 07:23:41 crc kubenswrapper[4644]: I1213 07:23:41.113427 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21b16ef5e125b6e96b2e411da4a63aa394f2e1b15d0f68c1c3775398170c9a31"} err="failed to get container status \"21b16ef5e125b6e96b2e411da4a63aa394f2e1b15d0f68c1c3775398170c9a31\": rpc error: code = NotFound desc = could not find container \"21b16ef5e125b6e96b2e411da4a63aa394f2e1b15d0f68c1c3775398170c9a31\": container with ID starting with 21b16ef5e125b6e96b2e411da4a63aa394f2e1b15d0f68c1c3775398170c9a31 not found: ID does not exist" Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.403222 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bbf454a-f8d0-4af6-b490-a5cde0c61e5d" path="/var/lib/kubelet/pods/9bbf454a-f8d0-4af6-b490-a5cde0c61e5d/volumes" Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.422973 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.495677 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-inventory\") pod \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.495728 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-ssh-key\") pod \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.495831 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xghjd\" (UniqueName: \"kubernetes.io/projected/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-kube-api-access-xghjd\") pod \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.495879 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-neutron-metadata-combined-ca-bundle\") pod \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.495991 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-nova-metadata-neutron-config-0\") pod \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.496049 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-neutron-ovn-metadata-agent-neutron-config-0\") pod \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.496098 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-ceph\") pod \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\" (UID: \"08e5b710-aa24-40c9-9bd6-6d03543cc7b7\") " Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.501606 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-ceph" (OuterVolumeSpecName: "ceph") pod "08e5b710-aa24-40c9-9bd6-6d03543cc7b7" (UID: "08e5b710-aa24-40c9-9bd6-6d03543cc7b7"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.501699 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-kube-api-access-xghjd" (OuterVolumeSpecName: "kube-api-access-xghjd") pod "08e5b710-aa24-40c9-9bd6-6d03543cc7b7" (UID: "08e5b710-aa24-40c9-9bd6-6d03543cc7b7"). InnerVolumeSpecName "kube-api-access-xghjd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.508333 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "08e5b710-aa24-40c9-9bd6-6d03543cc7b7" (UID: "08e5b710-aa24-40c9-9bd6-6d03543cc7b7"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.519470 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "08e5b710-aa24-40c9-9bd6-6d03543cc7b7" (UID: "08e5b710-aa24-40c9-9bd6-6d03543cc7b7"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.519501 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-inventory" (OuterVolumeSpecName: "inventory") pod "08e5b710-aa24-40c9-9bd6-6d03543cc7b7" (UID: "08e5b710-aa24-40c9-9bd6-6d03543cc7b7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.519902 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "08e5b710-aa24-40c9-9bd6-6d03543cc7b7" (UID: "08e5b710-aa24-40c9-9bd6-6d03543cc7b7"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.538745 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "08e5b710-aa24-40c9-9bd6-6d03543cc7b7" (UID: "08e5b710-aa24-40c9-9bd6-6d03543cc7b7"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.599569 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.599600 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.599613 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xghjd\" (UniqueName: \"kubernetes.io/projected/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-kube-api-access-xghjd\") on node \"crc\" DevicePath \"\"" Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.599626 4644 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.599637 4644 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.599648 4644 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 13 07:23:42 crc kubenswrapper[4644]: I1213 07:23:42.599658 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/08e5b710-aa24-40c9-9bd6-6d03543cc7b7-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.055141 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" event={"ID":"08e5b710-aa24-40c9-9bd6-6d03543cc7b7","Type":"ContainerDied","Data":"6d6dd56d7740324816fc16cfd1082879f7fa6858b4c7a7cccac2864c8a703930"} Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.055191 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d6dd56d7740324816fc16cfd1082879f7fa6858b4c7a7cccac2864c8a703930" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.055600 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.183245 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw"] Dec 13 07:23:43 crc kubenswrapper[4644]: E1213 07:23:43.183663 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bbf454a-f8d0-4af6-b490-a5cde0c61e5d" containerName="extract-utilities" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.183685 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bbf454a-f8d0-4af6-b490-a5cde0c61e5d" containerName="extract-utilities" Dec 13 07:23:43 crc kubenswrapper[4644]: E1213 07:23:43.183710 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bbf454a-f8d0-4af6-b490-a5cde0c61e5d" containerName="extract-content" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.183716 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bbf454a-f8d0-4af6-b490-a5cde0c61e5d" containerName="extract-content" Dec 13 07:23:43 crc kubenswrapper[4644]: E1213 07:23:43.183731 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08e5b710-aa24-40c9-9bd6-6d03543cc7b7" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.183739 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="08e5b710-aa24-40c9-9bd6-6d03543cc7b7" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 13 07:23:43 crc kubenswrapper[4644]: E1213 07:23:43.183748 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bbf454a-f8d0-4af6-b490-a5cde0c61e5d" containerName="registry-server" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.183753 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bbf454a-f8d0-4af6-b490-a5cde0c61e5d" containerName="registry-server" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.183911 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bbf454a-f8d0-4af6-b490-a5cde0c61e5d" containerName="registry-server" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.183926 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="08e5b710-aa24-40c9-9bd6-6d03543cc7b7" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.184565 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.191574 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.191718 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.192518 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw"] Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.192652 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.193584 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.193657 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.193755 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.208411 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x95r\" (UniqueName: \"kubernetes.io/projected/51c69e69-78ac-4480-8007-1b306d4ef7bf-kube-api-access-2x95r\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.208487 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.208541 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.208611 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.208676 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " 
pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.208777 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.311382 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.311543 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.311727 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.311884 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.312116 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.312265 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x95r\" (UniqueName: \"kubernetes.io/projected/51c69e69-78ac-4480-8007-1b306d4ef7bf-kube-api-access-2x95r\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.316507 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.317524 4644 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.318181 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.319398 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.320945 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.331633 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x95r\" (UniqueName: \"kubernetes.io/projected/51c69e69-78ac-4480-8007-1b306d4ef7bf-kube-api-access-2x95r\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.505337 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:23:43 crc kubenswrapper[4644]: I1213 07:23:43.994932 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw"] Dec 13 07:23:44 crc kubenswrapper[4644]: I1213 07:23:44.067245 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" event={"ID":"51c69e69-78ac-4480-8007-1b306d4ef7bf","Type":"ContainerStarted","Data":"f995a118ff0a56d0577913b909bb21eba5b8b158a9602632c4afc876aa58da77"} Dec 13 07:23:45 crc kubenswrapper[4644]: I1213 07:23:45.080155 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" event={"ID":"51c69e69-78ac-4480-8007-1b306d4ef7bf","Type":"ContainerStarted","Data":"8ecf25efdade36f60b53236219a6b36d2de025f1943ade2ae63e310997e78983"} Dec 13 07:23:45 crc kubenswrapper[4644]: I1213 07:23:45.098208 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" podStartSLOduration=1.435060833 podStartE2EDuration="2.098185777s" podCreationTimestamp="2025-12-13 07:23:43 +0000 UTC" firstStartedPulling="2025-12-13 07:23:43.99869216 +0000 UTC m=+2286.213642993" lastFinishedPulling="2025-12-13 07:23:44.661817105 +0000 UTC m=+2286.876767937" observedRunningTime="2025-12-13 07:23:45.097200203 +0000 UTC m=+2287.312151036" watchObservedRunningTime="2025-12-13 07:23:45.098185777 +0000 UTC m=+2287.313136609" Dec 13 07:23:55 crc kubenswrapper[4644]: I1213 07:23:55.390288 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:23:55 crc kubenswrapper[4644]: E1213 07:23:55.391999 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:24:08 crc kubenswrapper[4644]: I1213 07:24:08.394210 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:24:08 crc kubenswrapper[4644]: E1213 07:24:08.395814 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:24:20 crc kubenswrapper[4644]: I1213 07:24:20.388856 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:24:20 crc kubenswrapper[4644]: E1213 07:24:20.389951 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:24:33 crc kubenswrapper[4644]: I1213 07:24:33.389712 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:24:33 crc kubenswrapper[4644]: E1213 07:24:33.390840 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:24:44 crc kubenswrapper[4644]: I1213 07:24:44.391035 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:24:44 crc kubenswrapper[4644]: E1213 07:24:44.392109 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:24:59 crc kubenswrapper[4644]: I1213 07:24:59.389064 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:24:59 crc kubenswrapper[4644]: E1213 07:24:59.390087 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:25:14 crc kubenswrapper[4644]: I1213 07:25:14.389574 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:25:14 crc kubenswrapper[4644]: E1213 07:25:14.390543 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:25:27 crc kubenswrapper[4644]: I1213 07:25:27.389136 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:25:27 crc kubenswrapper[4644]: E1213 07:25:27.390037 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:25:41 crc kubenswrapper[4644]: I1213 07:25:41.389169 4644 
scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:25:41 crc kubenswrapper[4644]: E1213 07:25:41.390549 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:25:53 crc kubenswrapper[4644]: I1213 07:25:53.391071 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:25:53 crc kubenswrapper[4644]: E1213 07:25:53.393152 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:26:05 crc kubenswrapper[4644]: I1213 07:26:05.390263 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:26:05 crc kubenswrapper[4644]: E1213 07:26:05.391226 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:26:16 crc kubenswrapper[4644]: I1213 07:26:16.389761 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:26:16 crc kubenswrapper[4644]: E1213 07:26:16.390724 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:26:28 crc kubenswrapper[4644]: I1213 07:26:28.394436 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:26:28 crc kubenswrapper[4644]: E1213 07:26:28.395467 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:26:39 crc kubenswrapper[4644]: I1213 07:26:39.390369 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:26:39 crc kubenswrapper[4644]: E1213 07:26:39.391846 4644 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:26:52 crc kubenswrapper[4644]: I1213 07:26:52.389981 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:26:52 crc kubenswrapper[4644]: E1213 07:26:52.391035 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:27:03 crc kubenswrapper[4644]: I1213 07:27:03.390084 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:27:03 crc kubenswrapper[4644]: E1213 07:27:03.391186 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:27:07 crc kubenswrapper[4644]: I1213 07:27:07.894525 4644 generic.go:334] "Generic (PLEG): container finished" podID="51c69e69-78ac-4480-8007-1b306d4ef7bf" containerID="8ecf25efdade36f60b53236219a6b36d2de025f1943ade2ae63e310997e78983" exitCode=0 Dec 13 07:27:07 crc kubenswrapper[4644]: I1213 07:27:07.894613 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" event={"ID":"51c69e69-78ac-4480-8007-1b306d4ef7bf","Type":"ContainerDied","Data":"8ecf25efdade36f60b53236219a6b36d2de025f1943ade2ae63e310997e78983"} Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.238886 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.283588 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-inventory\") pod \"51c69e69-78ac-4480-8007-1b306d4ef7bf\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.283865 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2x95r\" (UniqueName: \"kubernetes.io/projected/51c69e69-78ac-4480-8007-1b306d4ef7bf-kube-api-access-2x95r\") pod \"51c69e69-78ac-4480-8007-1b306d4ef7bf\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.283939 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-ceph\") pod \"51c69e69-78ac-4480-8007-1b306d4ef7bf\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.284067 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-ssh-key\") pod \"51c69e69-78ac-4480-8007-1b306d4ef7bf\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.284138 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-libvirt-combined-ca-bundle\") pod \"51c69e69-78ac-4480-8007-1b306d4ef7bf\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.284178 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-libvirt-secret-0\") pod \"51c69e69-78ac-4480-8007-1b306d4ef7bf\" (UID: \"51c69e69-78ac-4480-8007-1b306d4ef7bf\") " Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.291059 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51c69e69-78ac-4480-8007-1b306d4ef7bf-kube-api-access-2x95r" (OuterVolumeSpecName: "kube-api-access-2x95r") pod "51c69e69-78ac-4480-8007-1b306d4ef7bf" (UID: "51c69e69-78ac-4480-8007-1b306d4ef7bf"). InnerVolumeSpecName "kube-api-access-2x95r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.294599 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "51c69e69-78ac-4480-8007-1b306d4ef7bf" (UID: "51c69e69-78ac-4480-8007-1b306d4ef7bf"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.298524 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-ceph" (OuterVolumeSpecName: "ceph") pod "51c69e69-78ac-4480-8007-1b306d4ef7bf" (UID: "51c69e69-78ac-4480-8007-1b306d4ef7bf"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.309490 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "51c69e69-78ac-4480-8007-1b306d4ef7bf" (UID: "51c69e69-78ac-4480-8007-1b306d4ef7bf"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.311364 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "51c69e69-78ac-4480-8007-1b306d4ef7bf" (UID: "51c69e69-78ac-4480-8007-1b306d4ef7bf"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.314705 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-inventory" (OuterVolumeSpecName: "inventory") pod "51c69e69-78ac-4480-8007-1b306d4ef7bf" (UID: "51c69e69-78ac-4480-8007-1b306d4ef7bf"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.389964 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.390000 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2x95r\" (UniqueName: \"kubernetes.io/projected/51c69e69-78ac-4480-8007-1b306d4ef7bf-kube-api-access-2x95r\") on node \"crc\" DevicePath \"\"" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.390016 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.390025 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.390034 4644 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.390047 4644 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/51c69e69-78ac-4480-8007-1b306d4ef7bf-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.915080 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" event={"ID":"51c69e69-78ac-4480-8007-1b306d4ef7bf","Type":"ContainerDied","Data":"f995a118ff0a56d0577913b909bb21eba5b8b158a9602632c4afc876aa58da77"} Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.915456 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f995a118ff0a56d0577913b909bb21eba5b8b158a9602632c4afc876aa58da77" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.915163 4644 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.991389 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6"] Dec 13 07:27:09 crc kubenswrapper[4644]: E1213 07:27:09.991819 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51c69e69-78ac-4480-8007-1b306d4ef7bf" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.991836 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="51c69e69-78ac-4480-8007-1b306d4ef7bf" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.992008 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="51c69e69-78ac-4480-8007-1b306d4ef7bf" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.992685 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.994855 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.995179 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.995218 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.997287 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.997360 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.997392 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-b28p5" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.997503 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ceph-nova" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.997535 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 13 07:27:09 crc kubenswrapper[4644]: I1213 07:27:09.997544 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.004603 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6"] Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.105312 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.105374 4644 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.105427 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.105514 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.105778 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.105845 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.105978 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.106012 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.106040 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64zbq\" (UniqueName: \"kubernetes.io/projected/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-kube-api-access-64zbq\") pod 
\"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.106113 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.106286 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.208312 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.208370 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.208466 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.208502 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.208533 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64zbq\" (UniqueName: \"kubernetes.io/projected/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-kube-api-access-64zbq\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.208579 4644 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.208728 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.208789 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.208820 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.208853 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.208903 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.210944 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.211067 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 
07:27:10.215603 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.215836 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.216193 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.217903 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.218378 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.218541 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.218905 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.219692 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 
07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.224990 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64zbq\" (UniqueName: \"kubernetes.io/projected/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-kube-api-access-64zbq\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.307325 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.774798 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6"] Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.777652 4644 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 07:27:10 crc kubenswrapper[4644]: I1213 07:27:10.926754 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" event={"ID":"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c","Type":"ContainerStarted","Data":"064738cf8a3c8b7aeb20a7b917d713744cc046e08e3472495ebba79f90817538"} Dec 13 07:27:11 crc kubenswrapper[4644]: I1213 07:27:11.938624 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" event={"ID":"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c","Type":"ContainerStarted","Data":"05c777c1e4dd3894c895a45ddbb03fe3d1c743bfbafa69470feb6cc9c70fe3f9"} Dec 13 07:27:11 crc kubenswrapper[4644]: I1213 07:27:11.973941 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" podStartSLOduration=2.497304064 podStartE2EDuration="2.973914474s" podCreationTimestamp="2025-12-13 07:27:09 +0000 UTC" firstStartedPulling="2025-12-13 07:27:10.777340229 +0000 UTC m=+2492.992291062" lastFinishedPulling="2025-12-13 07:27:11.253950639 +0000 UTC m=+2493.468901472" observedRunningTime="2025-12-13 07:27:11.96357302 +0000 UTC m=+2494.178523853" watchObservedRunningTime="2025-12-13 07:27:11.973914474 +0000 UTC m=+2494.188865308" Dec 13 07:27:17 crc kubenswrapper[4644]: I1213 07:27:17.389790 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:27:17 crc kubenswrapper[4644]: E1213 07:27:17.390689 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:27:32 crc kubenswrapper[4644]: I1213 07:27:32.389522 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:27:32 crc kubenswrapper[4644]: E1213 07:27:32.390523 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:27:47 crc kubenswrapper[4644]: I1213 07:27:47.389721 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:27:47 crc kubenswrapper[4644]: E1213 07:27:47.390356 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:28:00 crc kubenswrapper[4644]: I1213 07:28:00.389967 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:28:00 crc kubenswrapper[4644]: E1213 07:28:00.390656 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:28:12 crc kubenswrapper[4644]: I1213 07:28:12.389772 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:28:12 crc kubenswrapper[4644]: E1213 07:28:12.390576 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:28:23 crc kubenswrapper[4644]: I1213 07:28:23.389499 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:28:23 crc kubenswrapper[4644]: E1213 07:28:23.390302 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:28:38 crc kubenswrapper[4644]: I1213 07:28:38.394320 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:28:38 crc kubenswrapper[4644]: E1213 07:28:38.395157 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" 
podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:28:53 crc kubenswrapper[4644]: I1213 07:28:53.389387 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:28:53 crc kubenswrapper[4644]: I1213 07:28:53.719884 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerStarted","Data":"4f4b7bfe2613e6cda772a52ce14ce5049a7998ee5bbd61facee3957853dd67cc"} Dec 13 07:29:18 crc kubenswrapper[4644]: I1213 07:29:18.918942 4644 generic.go:334] "Generic (PLEG): container finished" podID="f123c7e1-05b6-4a07-a7e9-b9d0cf90792c" containerID="05c777c1e4dd3894c895a45ddbb03fe3d1c743bfbafa69470feb6cc9c70fe3f9" exitCode=0 Dec 13 07:29:18 crc kubenswrapper[4644]: I1213 07:29:18.919022 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" event={"ID":"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c","Type":"ContainerDied","Data":"05c777c1e4dd3894c895a45ddbb03fe3d1c743bfbafa69470feb6cc9c70fe3f9"} Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.225258 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.243160 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-inventory\") pod \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.243204 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-migration-ssh-key-0\") pod \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.243257 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64zbq\" (UniqueName: \"kubernetes.io/projected/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-kube-api-access-64zbq\") pod \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.243282 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-migration-ssh-key-1\") pod \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.243314 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ceph\") pod \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.243357 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-extra-config-0\") pod \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " Dec 13 07:29:20 crc 
kubenswrapper[4644]: I1213 07:29:20.243470 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-cell1-compute-config-1\") pod \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.243516 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ssh-key\") pod \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.243585 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-custom-ceph-combined-ca-bundle\") pod \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.243647 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-cell1-compute-config-0\") pod \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.243666 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ceph-nova-0\") pod \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\" (UID: \"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c\") " Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.248621 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-custom-ceph-combined-ca-bundle" (OuterVolumeSpecName: "nova-custom-ceph-combined-ca-bundle") pod "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c" (UID: "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c"). InnerVolumeSpecName "nova-custom-ceph-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.248627 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-kube-api-access-64zbq" (OuterVolumeSpecName: "kube-api-access-64zbq") pod "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c" (UID: "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c"). InnerVolumeSpecName "kube-api-access-64zbq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.251531 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ceph" (OuterVolumeSpecName: "ceph") pod "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c" (UID: "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.264918 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c" (UID: "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c"). 
InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.266166 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c" (UID: "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.267397 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c" (UID: "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.272905 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ceph-nova-0" (OuterVolumeSpecName: "ceph-nova-0") pod "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c" (UID: "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c"). InnerVolumeSpecName "ceph-nova-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.275857 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c" (UID: "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.278059 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c" (UID: "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.279675 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-inventory" (OuterVolumeSpecName: "inventory") pod "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c" (UID: "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.281503 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c" (UID: "f123c7e1-05b6-4a07-a7e9-b9d0cf90792c"). InnerVolumeSpecName "nova-cell1-compute-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.346233 4644 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.346267 4644 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.346278 4644 reconciler_common.go:293] "Volume detached for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-custom-ceph-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.346287 4644 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.346299 4644 reconciler_common.go:293] "Volume detached for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ceph-nova-0\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.346308 4644 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-inventory\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.346316 4644 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.346323 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64zbq\" (UniqueName: \"kubernetes.io/projected/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-kube-api-access-64zbq\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.346331 4644 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.346338 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.346345 4644 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f123c7e1-05b6-4a07-a7e9-b9d0cf90792c-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.932750 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" event={"ID":"f123c7e1-05b6-4a07-a7e9-b9d0cf90792c","Type":"ContainerDied","Data":"064738cf8a3c8b7aeb20a7b917d713744cc046e08e3472495ebba79f90817538"} Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.932788 4644 pod_container_deletor.go:80] "Container not found in pod's 
containers" containerID="064738cf8a3c8b7aeb20a7b917d713744cc046e08e3472495ebba79f90817538" Dec 13 07:29:20 crc kubenswrapper[4644]: I1213 07:29:20.932977 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.762398 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 13 07:29:31 crc kubenswrapper[4644]: E1213 07:29:31.763369 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f123c7e1-05b6-4a07-a7e9-b9d0cf90792c" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.763385 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="f123c7e1-05b6-4a07-a7e9-b9d0cf90792c" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.763656 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="f123c7e1-05b6-4a07-a7e9-b9d0cf90792c" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.764600 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.769037 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.770032 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.777069 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.817425 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.818766 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.820601 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.835617 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/45f91b2c-2718-493d-815e-9f9d2f763143-config-data-custom\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.835655 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/45f91b2c-2718-493d-815e-9f9d2f763143-ceph\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.835772 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45f91b2c-2718-493d-815e-9f9d2f763143-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.835820 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.835846 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.835862 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh7qv\" (UniqueName: \"kubernetes.io/projected/aa86065a-5362-49a8-bc90-ea0a8495e132-kube-api-access-hh7qv\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.835895 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.835937 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-etc-nvme\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836000 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: 
\"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836022 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa86065a-5362-49a8-bc90-ea0a8495e132-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836044 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/aa86065a-5362-49a8-bc90-ea0a8495e132-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836067 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-sys\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836087 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836146 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836186 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836223 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-run\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836254 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836267 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-etc-nvme\") pod 
\"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836282 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-dev\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836302 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45f91b2c-2718-493d-815e-9f9d2f763143-config-data\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836328 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa86065a-5362-49a8-bc90-ea0a8495e132-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836353 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836370 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-run\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836392 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45f91b2c-2718-493d-815e-9f9d2f763143-scripts\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836413 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-sys\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836434 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836471 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94pch\" (UniqueName: \"kubernetes.io/projected/45f91b2c-2718-493d-815e-9f9d2f763143-kube-api-access-94pch\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: 
I1213 07:29:31.836511 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836528 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa86065a-5362-49a8-bc90-ea0a8495e132-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836558 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa86065a-5362-49a8-bc90-ea0a8495e132-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836572 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-lib-modules\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.836593 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-dev\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.838286 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938505 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938558 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938596 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-run\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938622 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc 
kubenswrapper[4644]: I1213 07:29:31.938635 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938652 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-dev\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938671 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45f91b2c-2718-493d-815e-9f9d2f763143-config-data\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938698 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa86065a-5362-49a8-bc90-ea0a8495e132-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938723 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938742 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-run\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938764 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45f91b2c-2718-493d-815e-9f9d2f763143-scripts\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938783 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-sys\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938803 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938806 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " 
pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938827 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94pch\" (UniqueName: \"kubernetes.io/projected/45f91b2c-2718-493d-815e-9f9d2f763143-kube-api-access-94pch\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938898 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938919 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa86065a-5362-49a8-bc90-ea0a8495e132-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938957 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa86065a-5362-49a8-bc90-ea0a8495e132-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.938976 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-lib-modules\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939012 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-dev\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939059 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/45f91b2c-2718-493d-815e-9f9d2f763143-config-data-custom\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939085 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/45f91b2c-2718-493d-815e-9f9d2f763143-ceph\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939142 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45f91b2c-2718-493d-815e-9f9d2f763143-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939170 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939197 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939211 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh7qv\" (UniqueName: \"kubernetes.io/projected/aa86065a-5362-49a8-bc90-ea0a8495e132-kube-api-access-hh7qv\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939241 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939260 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-etc-nvme\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939281 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939296 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa86065a-5362-49a8-bc90-ea0a8495e132-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939327 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/aa86065a-5362-49a8-bc90-ea0a8495e132-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939352 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-sys\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939372 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939519 
4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939608 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939772 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.939845 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.940093 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.940162 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-etc-nvme\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.940262 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.940928 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.940991 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.941018 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-run\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc 
kubenswrapper[4644]: I1213 07:29:31.941047 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.941341 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-run\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.941391 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-lib-modules\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.941411 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-dev\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.941496 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/aa86065a-5362-49a8-bc90-ea0a8495e132-sys\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.941589 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.941594 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.941607 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-dev\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.941620 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/45f91b2c-2718-493d-815e-9f9d2f763143-sys\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.944624 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/45f91b2c-2718-493d-815e-9f9d2f763143-config-data-custom\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.945919 4644 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/aa86065a-5362-49a8-bc90-ea0a8495e132-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.946293 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa86065a-5362-49a8-bc90-ea0a8495e132-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.946338 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa86065a-5362-49a8-bc90-ea0a8495e132-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.947076 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45f91b2c-2718-493d-815e-9f9d2f763143-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.948625 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/45f91b2c-2718-493d-815e-9f9d2f763143-ceph\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.948869 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa86065a-5362-49a8-bc90-ea0a8495e132-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.949225 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45f91b2c-2718-493d-815e-9f9d2f763143-config-data\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.949669 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45f91b2c-2718-493d-815e-9f9d2f763143-scripts\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.956097 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94pch\" (UniqueName: \"kubernetes.io/projected/45f91b2c-2718-493d-815e-9f9d2f763143-kube-api-access-94pch\") pod \"cinder-backup-0\" (UID: \"45f91b2c-2718-493d-815e-9f9d2f763143\") " pod="openstack/cinder-backup-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 07:29:31.956192 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh7qv\" (UniqueName: \"kubernetes.io/projected/aa86065a-5362-49a8-bc90-ea0a8495e132-kube-api-access-hh7qv\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:31 crc kubenswrapper[4644]: I1213 
07:29:31.969582 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa86065a-5362-49a8-bc90-ea0a8495e132-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"aa86065a-5362-49a8-bc90-ea0a8495e132\") " pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.081970 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.136650 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.335741 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-zkrcn"] Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.337052 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-zkrcn" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.344208 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-zkrcn"] Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.346365 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbjc9\" (UniqueName: \"kubernetes.io/projected/6c34af67-d12c-49be-beed-0f83e5faa134-kube-api-access-kbjc9\") pod \"manila-db-create-zkrcn\" (UID: \"6c34af67-d12c-49be-beed-0f83e5faa134\") " pod="openstack/manila-db-create-zkrcn" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.346425 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c34af67-d12c-49be-beed-0f83e5faa134-operator-scripts\") pod \"manila-db-create-zkrcn\" (UID: \"6c34af67-d12c-49be-beed-0f83e5faa134\") " pod="openstack/manila-db-create-zkrcn" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.437345 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-a410-account-create-update-6z7z6"] Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.438375 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-a410-account-create-update-6z7z6" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.439877 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.448286 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5-operator-scripts\") pod \"manila-a410-account-create-update-6z7z6\" (UID: \"d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5\") " pod="openstack/manila-a410-account-create-update-6z7z6" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.448369 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbjc9\" (UniqueName: \"kubernetes.io/projected/6c34af67-d12c-49be-beed-0f83e5faa134-kube-api-access-kbjc9\") pod \"manila-db-create-zkrcn\" (UID: \"6c34af67-d12c-49be-beed-0f83e5faa134\") " pod="openstack/manila-db-create-zkrcn" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.448423 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnwnn\" (UniqueName: \"kubernetes.io/projected/d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5-kube-api-access-lnwnn\") pod \"manila-a410-account-create-update-6z7z6\" (UID: \"d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5\") " pod="openstack/manila-a410-account-create-update-6z7z6" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.448499 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c34af67-d12c-49be-beed-0f83e5faa134-operator-scripts\") pod \"manila-db-create-zkrcn\" (UID: \"6c34af67-d12c-49be-beed-0f83e5faa134\") " pod="openstack/manila-db-create-zkrcn" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.449969 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c34af67-d12c-49be-beed-0f83e5faa134-operator-scripts\") pod \"manila-db-create-zkrcn\" (UID: \"6c34af67-d12c-49be-beed-0f83e5faa134\") " pod="openstack/manila-db-create-zkrcn" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.453528 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-a410-account-create-update-6z7z6"] Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.466317 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbjc9\" (UniqueName: \"kubernetes.io/projected/6c34af67-d12c-49be-beed-0f83e5faa134-kube-api-access-kbjc9\") pod \"manila-db-create-zkrcn\" (UID: \"6c34af67-d12c-49be-beed-0f83e5faa134\") " pod="openstack/manila-db-create-zkrcn" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.550090 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnwnn\" (UniqueName: \"kubernetes.io/projected/d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5-kube-api-access-lnwnn\") pod \"manila-a410-account-create-update-6z7z6\" (UID: \"d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5\") " pod="openstack/manila-a410-account-create-update-6z7z6" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.550358 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5-operator-scripts\") pod 
\"manila-a410-account-create-update-6z7z6\" (UID: \"d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5\") " pod="openstack/manila-a410-account-create-update-6z7z6" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.551019 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5-operator-scripts\") pod \"manila-a410-account-create-update-6z7z6\" (UID: \"d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5\") " pod="openstack/manila-a410-account-create-update-6z7z6" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.567013 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnwnn\" (UniqueName: \"kubernetes.io/projected/d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5-kube-api-access-lnwnn\") pod \"manila-a410-account-create-update-6z7z6\" (UID: \"d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5\") " pod="openstack/manila-a410-account-create-update-6z7z6" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.577511 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.589497 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.594704 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.598725 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.598971 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.599923 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-58bbs" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.601888 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.630400 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.656676 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-zkrcn" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.662530 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.664012 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.665865 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.666036 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.673170 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.726159 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.754054 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.754122 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-config-data\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.754179 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-ceph\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.754505 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vf69x\" (UniqueName: \"kubernetes.io/projected/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-kube-api-access-vf69x\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.754632 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.754666 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-logs\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.754687 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " 
pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.754831 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.754898 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-scripts\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.759403 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-a410-account-create-update-6z7z6" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857100 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-config-data\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857141 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857174 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-ceph\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857192 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857221 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857247 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxkz6\" (UniqueName: \"kubernetes.io/projected/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-kube-api-access-wxkz6\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857268 4644 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857308 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vf69x\" (UniqueName: \"kubernetes.io/projected/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-kube-api-access-vf69x\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857347 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857361 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857376 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857392 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-ceph\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857408 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-logs\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857422 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857460 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-logs\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857482 4644 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857511 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-scripts\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857534 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.857760 4644 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.858890 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-logs\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.859098 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.862059 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-scripts\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.862168 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-ceph\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.862305 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.862473 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-config-data\") pod \"glance-default-external-api-0\" (UID: 
\"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.863983 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.871589 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vf69x\" (UniqueName: \"kubernetes.io/projected/7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3-kube-api-access-vf69x\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.884430 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3\") " pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.911865 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.960139 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.960426 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxkz6\" (UniqueName: \"kubernetes.io/projected/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-kube-api-access-wxkz6\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.960494 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.962658 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.962717 4644 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.962741 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.962771 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-ceph\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.962885 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-logs\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.963002 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.963054 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.964537 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-logs\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.966147 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.966474 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.967950 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.968036 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") 
" pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.972972 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-ceph\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.982018 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:32 crc kubenswrapper[4644]: I1213 07:29:32.983981 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxkz6\" (UniqueName: \"kubernetes.io/projected/d5415b9a-c4c6-4b46-beb9-6bc0fed2e723-kube-api-access-wxkz6\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:33 crc kubenswrapper[4644]: I1213 07:29:33.005522 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723\") " pod="openstack/glance-default-internal-api-0" Dec 13 07:29:33 crc kubenswrapper[4644]: I1213 07:29:33.019406 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"aa86065a-5362-49a8-bc90-ea0a8495e132","Type":"ContainerStarted","Data":"7c4eea143248c9a788a3c11bc5674f1bab4f75cb9ee64b064b8c71ff29838124"} Dec 13 07:29:33 crc kubenswrapper[4644]: I1213 07:29:33.020688 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"45f91b2c-2718-493d-815e-9f9d2f763143","Type":"ContainerStarted","Data":"268668b10318261f3bdedd39c55e29f50697d9836f1f09ec344a202c09d9d41d"} Dec 13 07:29:33 crc kubenswrapper[4644]: I1213 07:29:33.060785 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-zkrcn"] Dec 13 07:29:33 crc kubenswrapper[4644]: I1213 07:29:33.152367 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-a410-account-create-update-6z7z6"] Dec 13 07:29:33 crc kubenswrapper[4644]: I1213 07:29:33.286999 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 13 07:29:33 crc kubenswrapper[4644]: I1213 07:29:33.413810 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 13 07:29:33 crc kubenswrapper[4644]: W1213 07:29:33.418836 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a7a4f6d_247d_4d1c_9eb2_9a43ddd831b3.slice/crio-6dcd04f09be8de8e201aa541681b78e1d791000d03369d61f8d3bf48021cbfc8 WatchSource:0}: Error finding container 6dcd04f09be8de8e201aa541681b78e1d791000d03369d61f8d3bf48021cbfc8: Status 404 returned error can't find the container with id 6dcd04f09be8de8e201aa541681b78e1d791000d03369d61f8d3bf48021cbfc8 Dec 13 07:29:33 crc kubenswrapper[4644]: I1213 07:29:33.743013 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 13 07:29:33 crc kubenswrapper[4644]: W1213 07:29:33.747423 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd5415b9a_c4c6_4b46_beb9_6bc0fed2e723.slice/crio-edb9c8559a1af20524a4c202aa987b3c369d8c622d70278ca0703622d5d4663e WatchSource:0}: Error finding container edb9c8559a1af20524a4c202aa987b3c369d8c622d70278ca0703622d5d4663e: Status 404 returned error can't find the container with id edb9c8559a1af20524a4c202aa987b3c369d8c622d70278ca0703622d5d4663e Dec 13 07:29:34 crc kubenswrapper[4644]: I1213 07:29:34.038665 4644 generic.go:334] "Generic (PLEG): container finished" podID="d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5" containerID="a8cb62ef866ff5bed5585c29ae1959f5da9757d2c4b154db02574ee882f41400" exitCode=0 Dec 13 07:29:34 crc kubenswrapper[4644]: I1213 07:29:34.038765 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-a410-account-create-update-6z7z6" event={"ID":"d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5","Type":"ContainerDied","Data":"a8cb62ef866ff5bed5585c29ae1959f5da9757d2c4b154db02574ee882f41400"} Dec 13 07:29:34 crc kubenswrapper[4644]: I1213 07:29:34.039064 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-a410-account-create-update-6z7z6" event={"ID":"d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5","Type":"ContainerStarted","Data":"1e0e4d135ebdcdc8a394c0b63d808779d1cdc138fbf9ed3147c5d55bf55dbb70"} Dec 13 07:29:34 crc kubenswrapper[4644]: I1213 07:29:34.042809 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723","Type":"ContainerStarted","Data":"edb9c8559a1af20524a4c202aa987b3c369d8c622d70278ca0703622d5d4663e"} Dec 13 07:29:34 crc kubenswrapper[4644]: I1213 07:29:34.044388 4644 generic.go:334] "Generic (PLEG): container finished" podID="6c34af67-d12c-49be-beed-0f83e5faa134" containerID="085bcebe1a4c25c261997f2a8f5e6b575910d73c97d03547623ffa2ca72d4fda" exitCode=0 Dec 13 07:29:34 crc kubenswrapper[4644]: I1213 07:29:34.044435 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-zkrcn" event={"ID":"6c34af67-d12c-49be-beed-0f83e5faa134","Type":"ContainerDied","Data":"085bcebe1a4c25c261997f2a8f5e6b575910d73c97d03547623ffa2ca72d4fda"} Dec 13 07:29:34 crc kubenswrapper[4644]: I1213 07:29:34.044471 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-zkrcn" 
event={"ID":"6c34af67-d12c-49be-beed-0f83e5faa134","Type":"ContainerStarted","Data":"b4cb06f4ade1d59d8f0316d81fa8f469a1a29eb79526a37afc8e6fe2197bdcd4"} Dec 13 07:29:34 crc kubenswrapper[4644]: I1213 07:29:34.051777 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3","Type":"ContainerStarted","Data":"b0d55700f4f55a33a87a8215d6dc49e4c67f860a08fd9d162bdfd5375214de4d"} Dec 13 07:29:34 crc kubenswrapper[4644]: I1213 07:29:34.051823 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3","Type":"ContainerStarted","Data":"6dcd04f09be8de8e201aa541681b78e1d791000d03369d61f8d3bf48021cbfc8"} Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.071150 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723","Type":"ContainerStarted","Data":"e7f307275ecc0b4c2da6eab5d46f1cbd1610d2dd26918af5e246ecf270e3181c"} Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.073517 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d5415b9a-c4c6-4b46-beb9-6bc0fed2e723","Type":"ContainerStarted","Data":"43d9a8f83da8463ba593dde21df8dfe57fa3a3a23b6ae823337477d88fcbd8d5"} Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.092772 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.092758078 podStartE2EDuration="4.092758078s" podCreationTimestamp="2025-12-13 07:29:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:29:35.092103308 +0000 UTC m=+2637.307054141" watchObservedRunningTime="2025-12-13 07:29:35.092758078 +0000 UTC m=+2637.307708912" Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.094687 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"aa86065a-5362-49a8-bc90-ea0a8495e132","Type":"ContainerStarted","Data":"98501bd70141912b07a5342808de86964361b0512629942d8876ec77c6cb20ed"} Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.094730 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"aa86065a-5362-49a8-bc90-ea0a8495e132","Type":"ContainerStarted","Data":"a11035307d28ef150f122c4b03cd0567c062101b2bdb0f34b9d8e1d868f131c2"} Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.106382 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"45f91b2c-2718-493d-815e-9f9d2f763143","Type":"ContainerStarted","Data":"6ad7d97d93644e2a496e0ae088313962d8422ea6c7add5cdf58bec9652784dab"} Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.106429 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"45f91b2c-2718-493d-815e-9f9d2f763143","Type":"ContainerStarted","Data":"8e880220ba0adc957ba2e760f1c44bd61d060e5e7009e0bb0def70280dbef056"} Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.110281 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3","Type":"ContainerStarted","Data":"b3d2bae37c123c213e881c33b0021a853c95715c82d362b9a63faf1cb860b7c9"} Dec 13 07:29:35 crc 
kubenswrapper[4644]: I1213 07:29:35.128705 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=2.753711481 podStartE2EDuration="4.128691149s" podCreationTimestamp="2025-12-13 07:29:31 +0000 UTC" firstStartedPulling="2025-12-13 07:29:32.582140306 +0000 UTC m=+2634.797091139" lastFinishedPulling="2025-12-13 07:29:33.957119974 +0000 UTC m=+2636.172070807" observedRunningTime="2025-12-13 07:29:35.123684886 +0000 UTC m=+2637.338635719" watchObservedRunningTime="2025-12-13 07:29:35.128691149 +0000 UTC m=+2637.343641982" Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.148427 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=2.922438427 podStartE2EDuration="4.148411346s" podCreationTimestamp="2025-12-13 07:29:31 +0000 UTC" firstStartedPulling="2025-12-13 07:29:32.734635073 +0000 UTC m=+2634.949585916" lastFinishedPulling="2025-12-13 07:29:33.960608001 +0000 UTC m=+2636.175558835" observedRunningTime="2025-12-13 07:29:35.14119593 +0000 UTC m=+2637.356146763" watchObservedRunningTime="2025-12-13 07:29:35.148411346 +0000 UTC m=+2637.363362180" Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.162516 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.162499985 podStartE2EDuration="4.162499985s" podCreationTimestamp="2025-12-13 07:29:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:29:35.157752409 +0000 UTC m=+2637.372703241" watchObservedRunningTime="2025-12-13 07:29:35.162499985 +0000 UTC m=+2637.377450818" Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.436911 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-a410-account-create-update-6z7z6" Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.445484 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-zkrcn" Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.623498 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnwnn\" (UniqueName: \"kubernetes.io/projected/d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5-kube-api-access-lnwnn\") pod \"d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5\" (UID: \"d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5\") " Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.623605 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c34af67-d12c-49be-beed-0f83e5faa134-operator-scripts\") pod \"6c34af67-d12c-49be-beed-0f83e5faa134\" (UID: \"6c34af67-d12c-49be-beed-0f83e5faa134\") " Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.623695 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5-operator-scripts\") pod \"d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5\" (UID: \"d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5\") " Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.623720 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbjc9\" (UniqueName: \"kubernetes.io/projected/6c34af67-d12c-49be-beed-0f83e5faa134-kube-api-access-kbjc9\") pod \"6c34af67-d12c-49be-beed-0f83e5faa134\" (UID: \"6c34af67-d12c-49be-beed-0f83e5faa134\") " Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.625495 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c34af67-d12c-49be-beed-0f83e5faa134-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6c34af67-d12c-49be-beed-0f83e5faa134" (UID: "6c34af67-d12c-49be-beed-0f83e5faa134"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.625648 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5" (UID: "d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.630196 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5-kube-api-access-lnwnn" (OuterVolumeSpecName: "kube-api-access-lnwnn") pod "d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5" (UID: "d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5"). InnerVolumeSpecName "kube-api-access-lnwnn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.630972 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c34af67-d12c-49be-beed-0f83e5faa134-kube-api-access-kbjc9" (OuterVolumeSpecName: "kube-api-access-kbjc9") pod "6c34af67-d12c-49be-beed-0f83e5faa134" (UID: "6c34af67-d12c-49be-beed-0f83e5faa134"). InnerVolumeSpecName "kube-api-access-kbjc9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.727053 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c34af67-d12c-49be-beed-0f83e5faa134-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.727354 4644 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.727364 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbjc9\" (UniqueName: \"kubernetes.io/projected/6c34af67-d12c-49be-beed-0f83e5faa134-kube-api-access-kbjc9\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:35 crc kubenswrapper[4644]: I1213 07:29:35.727377 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnwnn\" (UniqueName: \"kubernetes.io/projected/d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5-kube-api-access-lnwnn\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:36 crc kubenswrapper[4644]: I1213 07:29:36.119105 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-a410-account-create-update-6z7z6" event={"ID":"d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5","Type":"ContainerDied","Data":"1e0e4d135ebdcdc8a394c0b63d808779d1cdc138fbf9ed3147c5d55bf55dbb70"} Dec 13 07:29:36 crc kubenswrapper[4644]: I1213 07:29:36.119150 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e0e4d135ebdcdc8a394c0b63d808779d1cdc138fbf9ed3147c5d55bf55dbb70" Dec 13 07:29:36 crc kubenswrapper[4644]: I1213 07:29:36.119213 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-a410-account-create-update-6z7z6" Dec 13 07:29:36 crc kubenswrapper[4644]: I1213 07:29:36.125289 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-zkrcn" Dec 13 07:29:36 crc kubenswrapper[4644]: I1213 07:29:36.126643 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-zkrcn" event={"ID":"6c34af67-d12c-49be-beed-0f83e5faa134","Type":"ContainerDied","Data":"b4cb06f4ade1d59d8f0316d81fa8f469a1a29eb79526a37afc8e6fe2197bdcd4"} Dec 13 07:29:36 crc kubenswrapper[4644]: I1213 07:29:36.126708 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4cb06f4ade1d59d8f0316d81fa8f469a1a29eb79526a37afc8e6fe2197bdcd4" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.082494 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.137751 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.758704 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-6vs42"] Dec 13 07:29:37 crc kubenswrapper[4644]: E1213 07:29:37.759931 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c34af67-d12c-49be-beed-0f83e5faa134" containerName="mariadb-database-create" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.759964 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c34af67-d12c-49be-beed-0f83e5faa134" containerName="mariadb-database-create" Dec 13 07:29:37 crc kubenswrapper[4644]: E1213 07:29:37.759989 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5" containerName="mariadb-account-create-update" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.759998 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5" containerName="mariadb-account-create-update" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.760365 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5" containerName="mariadb-account-create-update" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.760414 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c34af67-d12c-49be-beed-0f83e5faa134" containerName="mariadb-database-create" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.761717 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-6vs42" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.763422 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-mgbmm" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.764775 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-6vs42"] Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.765011 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.876735 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-combined-ca-bundle\") pod \"manila-db-sync-6vs42\" (UID: \"d158e5b2-13e4-4276-aba2-d5f061a842af\") " pod="openstack/manila-db-sync-6vs42" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.876911 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-config-data\") pod \"manila-db-sync-6vs42\" (UID: \"d158e5b2-13e4-4276-aba2-d5f061a842af\") " pod="openstack/manila-db-sync-6vs42" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.876956 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-job-config-data\") pod \"manila-db-sync-6vs42\" (UID: \"d158e5b2-13e4-4276-aba2-d5f061a842af\") " pod="openstack/manila-db-sync-6vs42" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.877415 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmbp7\" (UniqueName: \"kubernetes.io/projected/d158e5b2-13e4-4276-aba2-d5f061a842af-kube-api-access-rmbp7\") pod \"manila-db-sync-6vs42\" (UID: \"d158e5b2-13e4-4276-aba2-d5f061a842af\") " pod="openstack/manila-db-sync-6vs42" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.979620 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-config-data\") pod \"manila-db-sync-6vs42\" (UID: \"d158e5b2-13e4-4276-aba2-d5f061a842af\") " pod="openstack/manila-db-sync-6vs42" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.979683 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-job-config-data\") pod \"manila-db-sync-6vs42\" (UID: \"d158e5b2-13e4-4276-aba2-d5f061a842af\") " pod="openstack/manila-db-sync-6vs42" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.980015 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmbp7\" (UniqueName: \"kubernetes.io/projected/d158e5b2-13e4-4276-aba2-d5f061a842af-kube-api-access-rmbp7\") pod \"manila-db-sync-6vs42\" (UID: \"d158e5b2-13e4-4276-aba2-d5f061a842af\") " pod="openstack/manila-db-sync-6vs42" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.980092 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-combined-ca-bundle\") pod \"manila-db-sync-6vs42\" (UID: 
\"d158e5b2-13e4-4276-aba2-d5f061a842af\") " pod="openstack/manila-db-sync-6vs42" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.985428 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-job-config-data\") pod \"manila-db-sync-6vs42\" (UID: \"d158e5b2-13e4-4276-aba2-d5f061a842af\") " pod="openstack/manila-db-sync-6vs42" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.986196 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-config-data\") pod \"manila-db-sync-6vs42\" (UID: \"d158e5b2-13e4-4276-aba2-d5f061a842af\") " pod="openstack/manila-db-sync-6vs42" Dec 13 07:29:37 crc kubenswrapper[4644]: I1213 07:29:37.990071 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-combined-ca-bundle\") pod \"manila-db-sync-6vs42\" (UID: \"d158e5b2-13e4-4276-aba2-d5f061a842af\") " pod="openstack/manila-db-sync-6vs42" Dec 13 07:29:38 crc kubenswrapper[4644]: I1213 07:29:38.002205 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmbp7\" (UniqueName: \"kubernetes.io/projected/d158e5b2-13e4-4276-aba2-d5f061a842af-kube-api-access-rmbp7\") pod \"manila-db-sync-6vs42\" (UID: \"d158e5b2-13e4-4276-aba2-d5f061a842af\") " pod="openstack/manila-db-sync-6vs42" Dec 13 07:29:38 crc kubenswrapper[4644]: I1213 07:29:38.083697 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-6vs42" Dec 13 07:29:38 crc kubenswrapper[4644]: I1213 07:29:38.523030 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-6vs42"] Dec 13 07:29:38 crc kubenswrapper[4644]: W1213 07:29:38.526895 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd158e5b2_13e4_4276_aba2_d5f061a842af.slice/crio-9dc8707d10efadd2d5cd0b209f8bb9611a607580e6f583cf02551ef32c6066a6 WatchSource:0}: Error finding container 9dc8707d10efadd2d5cd0b209f8bb9611a607580e6f583cf02551ef32c6066a6: Status 404 returned error can't find the container with id 9dc8707d10efadd2d5cd0b209f8bb9611a607580e6f583cf02551ef32c6066a6 Dec 13 07:29:39 crc kubenswrapper[4644]: I1213 07:29:39.151032 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-6vs42" event={"ID":"d158e5b2-13e4-4276-aba2-d5f061a842af","Type":"ContainerStarted","Data":"9dc8707d10efadd2d5cd0b209f8bb9611a607580e6f583cf02551ef32c6066a6"} Dec 13 07:29:42 crc kubenswrapper[4644]: I1213 07:29:42.245071 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Dec 13 07:29:42 crc kubenswrapper[4644]: I1213 07:29:42.301992 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Dec 13 07:29:42 crc kubenswrapper[4644]: I1213 07:29:42.912689 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 13 07:29:42 crc kubenswrapper[4644]: I1213 07:29:42.912732 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 13 07:29:42 crc kubenswrapper[4644]: I1213 07:29:42.937828 4644 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 13 07:29:42 crc kubenswrapper[4644]: I1213 07:29:42.943293 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 13 07:29:43 crc kubenswrapper[4644]: I1213 07:29:43.193710 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 13 07:29:43 crc kubenswrapper[4644]: I1213 07:29:43.194058 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 13 07:29:43 crc kubenswrapper[4644]: I1213 07:29:43.287610 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 13 07:29:43 crc kubenswrapper[4644]: I1213 07:29:43.287667 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 13 07:29:43 crc kubenswrapper[4644]: I1213 07:29:43.312363 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 13 07:29:43 crc kubenswrapper[4644]: I1213 07:29:43.335340 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 13 07:29:44 crc kubenswrapper[4644]: I1213 07:29:44.200221 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 13 07:29:44 crc kubenswrapper[4644]: I1213 07:29:44.200465 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 13 07:29:45 crc kubenswrapper[4644]: I1213 07:29:45.159658 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 13 07:29:45 crc kubenswrapper[4644]: I1213 07:29:45.170270 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 13 07:29:45 crc kubenswrapper[4644]: I1213 07:29:45.224118 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-6vs42" event={"ID":"d158e5b2-13e4-4276-aba2-d5f061a842af","Type":"ContainerStarted","Data":"f65b897336173d2e65e48b5b56b8fc0589295ab5f0afc7639a23b23002d8b069"} Dec 13 07:29:45 crc kubenswrapper[4644]: I1213 07:29:45.241924 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-6vs42" podStartSLOduration=2.984146059 podStartE2EDuration="8.241905669s" podCreationTimestamp="2025-12-13 07:29:37 +0000 UTC" firstStartedPulling="2025-12-13 07:29:38.529160745 +0000 UTC m=+2640.744111579" lastFinishedPulling="2025-12-13 07:29:43.786920356 +0000 UTC m=+2646.001871189" observedRunningTime="2025-12-13 07:29:45.235271837 +0000 UTC m=+2647.450222669" watchObservedRunningTime="2025-12-13 07:29:45.241905669 +0000 UTC m=+2647.456856502" Dec 13 07:29:45 crc kubenswrapper[4644]: I1213 07:29:45.965014 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 13 07:29:45 crc kubenswrapper[4644]: I1213 07:29:45.966663 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 13 07:29:48 crc kubenswrapper[4644]: I1213 07:29:48.247920 4644 generic.go:334] "Generic (PLEG): container finished" podID="d158e5b2-13e4-4276-aba2-d5f061a842af" 
containerID="f65b897336173d2e65e48b5b56b8fc0589295ab5f0afc7639a23b23002d8b069" exitCode=0 Dec 13 07:29:48 crc kubenswrapper[4644]: I1213 07:29:48.247999 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-6vs42" event={"ID":"d158e5b2-13e4-4276-aba2-d5f061a842af","Type":"ContainerDied","Data":"f65b897336173d2e65e48b5b56b8fc0589295ab5f0afc7639a23b23002d8b069"} Dec 13 07:29:49 crc kubenswrapper[4644]: I1213 07:29:49.559961 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-6vs42" Dec 13 07:29:49 crc kubenswrapper[4644]: I1213 07:29:49.712606 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmbp7\" (UniqueName: \"kubernetes.io/projected/d158e5b2-13e4-4276-aba2-d5f061a842af-kube-api-access-rmbp7\") pod \"d158e5b2-13e4-4276-aba2-d5f061a842af\" (UID: \"d158e5b2-13e4-4276-aba2-d5f061a842af\") " Dec 13 07:29:49 crc kubenswrapper[4644]: I1213 07:29:49.712816 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-combined-ca-bundle\") pod \"d158e5b2-13e4-4276-aba2-d5f061a842af\" (UID: \"d158e5b2-13e4-4276-aba2-d5f061a842af\") " Dec 13 07:29:49 crc kubenswrapper[4644]: I1213 07:29:49.712925 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-config-data\") pod \"d158e5b2-13e4-4276-aba2-d5f061a842af\" (UID: \"d158e5b2-13e4-4276-aba2-d5f061a842af\") " Dec 13 07:29:49 crc kubenswrapper[4644]: I1213 07:29:49.712955 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-job-config-data\") pod \"d158e5b2-13e4-4276-aba2-d5f061a842af\" (UID: \"d158e5b2-13e4-4276-aba2-d5f061a842af\") " Dec 13 07:29:49 crc kubenswrapper[4644]: I1213 07:29:49.718078 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d158e5b2-13e4-4276-aba2-d5f061a842af-kube-api-access-rmbp7" (OuterVolumeSpecName: "kube-api-access-rmbp7") pod "d158e5b2-13e4-4276-aba2-d5f061a842af" (UID: "d158e5b2-13e4-4276-aba2-d5f061a842af"). InnerVolumeSpecName "kube-api-access-rmbp7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:29:49 crc kubenswrapper[4644]: I1213 07:29:49.718623 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "d158e5b2-13e4-4276-aba2-d5f061a842af" (UID: "d158e5b2-13e4-4276-aba2-d5f061a842af"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:49 crc kubenswrapper[4644]: I1213 07:29:49.720735 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-config-data" (OuterVolumeSpecName: "config-data") pod "d158e5b2-13e4-4276-aba2-d5f061a842af" (UID: "d158e5b2-13e4-4276-aba2-d5f061a842af"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:49 crc kubenswrapper[4644]: I1213 07:29:49.733663 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d158e5b2-13e4-4276-aba2-d5f061a842af" (UID: "d158e5b2-13e4-4276-aba2-d5f061a842af"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:49 crc kubenswrapper[4644]: I1213 07:29:49.814856 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:49 crc kubenswrapper[4644]: I1213 07:29:49.814884 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:49 crc kubenswrapper[4644]: I1213 07:29:49.814893 4644 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/d158e5b2-13e4-4276-aba2-d5f061a842af-job-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:49 crc kubenswrapper[4644]: I1213 07:29:49.814905 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmbp7\" (UniqueName: \"kubernetes.io/projected/d158e5b2-13e4-4276-aba2-d5f061a842af-kube-api-access-rmbp7\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.264548 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-6vs42" event={"ID":"d158e5b2-13e4-4276-aba2-d5f061a842af","Type":"ContainerDied","Data":"9dc8707d10efadd2d5cd0b209f8bb9611a607580e6f583cf02551ef32c6066a6"} Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.264825 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9dc8707d10efadd2d5cd0b209f8bb9611a607580e6f583cf02551ef32c6066a6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.264612 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-6vs42" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.477088 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Dec 13 07:29:50 crc kubenswrapper[4644]: E1213 07:29:50.477426 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d158e5b2-13e4-4276-aba2-d5f061a842af" containerName="manila-db-sync" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.477457 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="d158e5b2-13e4-4276-aba2-d5f061a842af" containerName="manila-db-sync" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.477662 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="d158e5b2-13e4-4276-aba2-d5f061a842af" containerName="manila-db-sync" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.478528 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.495571 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.495906 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-mgbmm" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.495976 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.496102 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.503873 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.511770 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.513512 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.515568 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.528954 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.567001 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67d475fdcf-drmz6"] Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.568396 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.596994 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67d475fdcf-drmz6"] Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.630076 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sfkt\" (UniqueName: \"kubernetes.io/projected/ac722a47-a17b-43ff-93ee-c240f65592cc-kube-api-access-6sfkt\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.630243 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/336b6941-d78a-455f-8b49-60dc81de435a-ovsdbserver-sb\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.630394 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/336b6941-d78a-455f-8b49-60dc81de435a-openstack-edpm-ipam\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.630536 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-scripts\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.630687 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/336b6941-d78a-455f-8b49-60dc81de435a-dns-svc\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.630734 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.630757 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/336b6941-d78a-455f-8b49-60dc81de435a-config\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.630782 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-config-data\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.630853 4644 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-config-data\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.630880 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/336b6941-d78a-455f-8b49-60dc81de435a-ovsdbserver-nb\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.630953 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-scripts\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.631031 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88gz8\" (UniqueName: \"kubernetes.io/projected/336b6941-d78a-455f-8b49-60dc81de435a-kube-api-access-88gz8\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.631113 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8v4q\" (UniqueName: \"kubernetes.io/projected/292f2b2c-6449-4899-83fd-7c5dd3632696-kube-api-access-l8v4q\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.631182 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/ac722a47-a17b-43ff-93ee-c240f65592cc-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.631248 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.631385 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ac722a47-a17b-43ff-93ee-c240f65592cc-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.631513 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ac722a47-a17b-43ff-93ee-c240f65592cc-ceph\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.631578 4644 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/292f2b2c-6449-4899-83fd-7c5dd3632696-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.631597 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.631637 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.733437 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/336b6941-d78a-455f-8b49-60dc81de435a-ovsdbserver-sb\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.733551 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/336b6941-d78a-455f-8b49-60dc81de435a-openstack-edpm-ipam\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.733583 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-scripts\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.733600 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.733651 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/336b6941-d78a-455f-8b49-60dc81de435a-dns-svc\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.733673 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.733692 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/336b6941-d78a-455f-8b49-60dc81de435a-config\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 
07:29:50.733716 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-config-data\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.733777 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-config-data\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.733801 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/336b6941-d78a-455f-8b49-60dc81de435a-ovsdbserver-nb\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.733865 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-scripts\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.733888 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88gz8\" (UniqueName: \"kubernetes.io/projected/336b6941-d78a-455f-8b49-60dc81de435a-kube-api-access-88gz8\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.733922 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8v4q\" (UniqueName: \"kubernetes.io/projected/292f2b2c-6449-4899-83fd-7c5dd3632696-kube-api-access-l8v4q\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.733953 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/ac722a47-a17b-43ff-93ee-c240f65592cc-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.733981 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.734111 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ac722a47-a17b-43ff-93ee-c240f65592cc-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.734136 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/projected/ac722a47-a17b-43ff-93ee-c240f65592cc-ceph\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.734181 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/292f2b2c-6449-4899-83fd-7c5dd3632696-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.734203 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.734236 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.734277 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sfkt\" (UniqueName: \"kubernetes.io/projected/ac722a47-a17b-43ff-93ee-c240f65592cc-kube-api-access-6sfkt\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.735880 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ac722a47-a17b-43ff-93ee-c240f65592cc-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.735972 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/ac722a47-a17b-43ff-93ee-c240f65592cc-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.736374 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/292f2b2c-6449-4899-83fd-7c5dd3632696-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.737432 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/336b6941-d78a-455f-8b49-60dc81de435a-ovsdbserver-sb\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.737664 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.741824 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-scripts\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.743492 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ac722a47-a17b-43ff-93ee-c240f65592cc-ceph\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.743835 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.744018 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/336b6941-d78a-455f-8b49-60dc81de435a-dns-svc\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.744827 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/336b6941-d78a-455f-8b49-60dc81de435a-ovsdbserver-nb\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.744903 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/336b6941-d78a-455f-8b49-60dc81de435a-openstack-edpm-ipam\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.747247 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/336b6941-d78a-455f-8b49-60dc81de435a-config\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.752179 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-config-data\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.754122 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-scripts\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.762477 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.762520 
4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88gz8\" (UniqueName: \"kubernetes.io/projected/336b6941-d78a-455f-8b49-60dc81de435a-kube-api-access-88gz8\") pod \"dnsmasq-dns-67d475fdcf-drmz6\" (UID: \"336b6941-d78a-455f-8b49-60dc81de435a\") " pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.762729 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.762950 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.763340 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.763459 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6sfkt\" (UniqueName: \"kubernetes.io/projected/ac722a47-a17b-43ff-93ee-c240f65592cc-kube-api-access-6sfkt\") pod \"manila-share-share1-0\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") " pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.764005 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-config-data\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.767892 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8v4q\" (UniqueName: \"kubernetes.io/projected/292f2b2c-6449-4899-83fd-7c5dd3632696-kube-api-access-l8v4q\") pod \"manila-scheduler-0\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") " pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.775092 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.805368 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.828729 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.840180 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.840648 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-config-data\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.840690 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-etc-machine-id\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.840727 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-logs\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.840754 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-scripts\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.840792 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-config-data-custom\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.840866 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5cwp\" (UniqueName: \"kubernetes.io/projected/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-kube-api-access-q5cwp\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.884294 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.944640 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-logs\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.944687 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-scripts\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.944732 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-config-data-custom\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.945807 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-logs\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.945834 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5cwp\" (UniqueName: \"kubernetes.io/projected/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-kube-api-access-q5cwp\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.945965 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.946000 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-config-data\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.946036 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-etc-machine-id\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.946123 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-etc-machine-id\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.955661 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-config-data-custom\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " 
pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.956712 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-scripts\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.966545 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.968064 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5cwp\" (UniqueName: \"kubernetes.io/projected/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-kube-api-access-q5cwp\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:50 crc kubenswrapper[4644]: I1213 07:29:50.983923 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-config-data\") pod \"manila-api-0\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " pod="openstack/manila-api-0" Dec 13 07:29:51 crc kubenswrapper[4644]: I1213 07:29:51.281118 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Dec 13 07:29:51 crc kubenswrapper[4644]: I1213 07:29:51.311357 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 13 07:29:51 crc kubenswrapper[4644]: W1213 07:29:51.323985 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod292f2b2c_6449_4899_83fd_7c5dd3632696.slice/crio-d270226d3996ca1297ebeb8245739526c7d710ccaabb7af33e7dfa0faf4c1bcd WatchSource:0}: Error finding container d270226d3996ca1297ebeb8245739526c7d710ccaabb7af33e7dfa0faf4c1bcd: Status 404 returned error can't find the container with id d270226d3996ca1297ebeb8245739526c7d710ccaabb7af33e7dfa0faf4c1bcd Dec 13 07:29:51 crc kubenswrapper[4644]: I1213 07:29:51.452763 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 13 07:29:51 crc kubenswrapper[4644]: W1213 07:29:51.453940 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac722a47_a17b_43ff_93ee_c240f65592cc.slice/crio-6d93304fb30f55a1e12d36a3e0e2242992bf9c7c0859c553e2c1185aca65f66d WatchSource:0}: Error finding container 6d93304fb30f55a1e12d36a3e0e2242992bf9c7c0859c553e2c1185aca65f66d: Status 404 returned error can't find the container with id 6d93304fb30f55a1e12d36a3e0e2242992bf9c7c0859c553e2c1185aca65f66d Dec 13 07:29:51 crc kubenswrapper[4644]: I1213 07:29:51.464240 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67d475fdcf-drmz6"] Dec 13 07:29:51 crc kubenswrapper[4644]: I1213 07:29:51.810863 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 13 07:29:51 crc kubenswrapper[4644]: W1213 07:29:51.813487 4644 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9dff1202_a70d_4fa8_89b6_0c8ba66790b1.slice/crio-45e7e525e9c297739a85eabe98f6158943321bacf4c5f69c47ed795c9ed87bf1 WatchSource:0}: Error finding container 45e7e525e9c297739a85eabe98f6158943321bacf4c5f69c47ed795c9ed87bf1: Status 404 returned error can't find the container with id 45e7e525e9c297739a85eabe98f6158943321bacf4c5f69c47ed795c9ed87bf1 Dec 13 07:29:52 crc kubenswrapper[4644]: I1213 07:29:52.289241 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"9dff1202-a70d-4fa8-89b6-0c8ba66790b1","Type":"ContainerStarted","Data":"83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305"} Dec 13 07:29:52 crc kubenswrapper[4644]: I1213 07:29:52.289951 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"9dff1202-a70d-4fa8-89b6-0c8ba66790b1","Type":"ContainerStarted","Data":"45e7e525e9c297739a85eabe98f6158943321bacf4c5f69c47ed795c9ed87bf1"} Dec 13 07:29:52 crc kubenswrapper[4644]: I1213 07:29:52.291381 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"292f2b2c-6449-4899-83fd-7c5dd3632696","Type":"ContainerStarted","Data":"d270226d3996ca1297ebeb8245739526c7d710ccaabb7af33e7dfa0faf4c1bcd"} Dec 13 07:29:52 crc kubenswrapper[4644]: I1213 07:29:52.292854 4644 generic.go:334] "Generic (PLEG): container finished" podID="336b6941-d78a-455f-8b49-60dc81de435a" containerID="f3d93caa52fe174f08a67f1480d22f2826eaa498856745a4ba12fb0b78ee1887" exitCode=0 Dec 13 07:29:52 crc kubenswrapper[4644]: I1213 07:29:52.292909 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" event={"ID":"336b6941-d78a-455f-8b49-60dc81de435a","Type":"ContainerDied","Data":"f3d93caa52fe174f08a67f1480d22f2826eaa498856745a4ba12fb0b78ee1887"} Dec 13 07:29:52 crc kubenswrapper[4644]: I1213 07:29:52.292925 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" event={"ID":"336b6941-d78a-455f-8b49-60dc81de435a","Type":"ContainerStarted","Data":"fe48a1264521e1a5a7907fa666980029d58b68bdea070849da594ec067969e20"} Dec 13 07:29:52 crc kubenswrapper[4644]: I1213 07:29:52.293691 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"ac722a47-a17b-43ff-93ee-c240f65592cc","Type":"ContainerStarted","Data":"6d93304fb30f55a1e12d36a3e0e2242992bf9c7c0859c553e2c1185aca65f66d"} Dec 13 07:29:53 crc kubenswrapper[4644]: I1213 07:29:53.189768 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Dec 13 07:29:53 crc kubenswrapper[4644]: I1213 07:29:53.302049 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"9dff1202-a70d-4fa8-89b6-0c8ba66790b1","Type":"ContainerStarted","Data":"853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74"} Dec 13 07:29:53 crc kubenswrapper[4644]: I1213 07:29:53.303361 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"292f2b2c-6449-4899-83fd-7c5dd3632696","Type":"ContainerStarted","Data":"ec29595ae8aad706b582735ef13a4ebabe6471ee15626b930afe073d8d29ac79"} Dec 13 07:29:53 crc kubenswrapper[4644]: I1213 07:29:53.303413 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" 
event={"ID":"292f2b2c-6449-4899-83fd-7c5dd3632696","Type":"ContainerStarted","Data":"372bbb832fe45cdb676bc352cb1796e775e87dbaa148ad0f4a2c34ab4e71abc2"} Dec 13 07:29:53 crc kubenswrapper[4644]: I1213 07:29:53.304797 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" event={"ID":"336b6941-d78a-455f-8b49-60dc81de435a","Type":"ContainerStarted","Data":"3b9e9aa9e94b9e7e51ebcfb726403aaf4c2c2d61a8f11cc016cbf9ac4b4b3d0a"} Dec 13 07:29:53 crc kubenswrapper[4644]: I1213 07:29:53.305036 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:29:53 crc kubenswrapper[4644]: I1213 07:29:53.320146 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=3.320128745 podStartE2EDuration="3.320128745s" podCreationTimestamp="2025-12-13 07:29:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:29:53.315622213 +0000 UTC m=+2655.530573046" watchObservedRunningTime="2025-12-13 07:29:53.320128745 +0000 UTC m=+2655.535079579" Dec 13 07:29:53 crc kubenswrapper[4644]: I1213 07:29:53.338814 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=2.396979917 podStartE2EDuration="3.338798378s" podCreationTimestamp="2025-12-13 07:29:50 +0000 UTC" firstStartedPulling="2025-12-13 07:29:51.327012565 +0000 UTC m=+2653.541963398" lastFinishedPulling="2025-12-13 07:29:52.268831025 +0000 UTC m=+2654.483781859" observedRunningTime="2025-12-13 07:29:53.332809999 +0000 UTC m=+2655.547760831" watchObservedRunningTime="2025-12-13 07:29:53.338798378 +0000 UTC m=+2655.553749210" Dec 13 07:29:53 crc kubenswrapper[4644]: I1213 07:29:53.350715 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" podStartSLOduration=3.350700735 podStartE2EDuration="3.350700735s" podCreationTimestamp="2025-12-13 07:29:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:29:53.349232486 +0000 UTC m=+2655.564183319" watchObservedRunningTime="2025-12-13 07:29:53.350700735 +0000 UTC m=+2655.565651569" Dec 13 07:29:54 crc kubenswrapper[4644]: I1213 07:29:54.312336 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="9dff1202-a70d-4fa8-89b6-0c8ba66790b1" containerName="manila-api-log" containerID="cri-o://83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305" gracePeriod=30 Dec 13 07:29:54 crc kubenswrapper[4644]: I1213 07:29:54.312619 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="9dff1202-a70d-4fa8-89b6-0c8ba66790b1" containerName="manila-api" containerID="cri-o://853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74" gracePeriod=30 Dec 13 07:29:54 crc kubenswrapper[4644]: I1213 07:29:54.312588 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Dec 13 07:29:54 crc kubenswrapper[4644]: I1213 07:29:54.568006 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:29:54 crc kubenswrapper[4644]: I1213 07:29:54.568246 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerName="ceilometer-central-agent" containerID="cri-o://6b28384b35a84f36ef44c38fb5854e57d4286eb11c3508fb01aa72f6006fb757" gracePeriod=30 Dec 13 07:29:54 crc kubenswrapper[4644]: I1213 07:29:54.568317 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerName="proxy-httpd" containerID="cri-o://c2b239327faaa7cba97565ba270ded26adfb4e3686407790983e9f8fddc11507" gracePeriod=30 Dec 13 07:29:54 crc kubenswrapper[4644]: I1213 07:29:54.568350 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerName="ceilometer-notification-agent" containerID="cri-o://28ce5e978887acf34be2d4cacbc886c7a64466edfd473c97f732a1bb15811c53" gracePeriod=30 Dec 13 07:29:54 crc kubenswrapper[4644]: I1213 07:29:54.568319 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerName="sg-core" containerID="cri-o://df1112b11e8df28beec36049f33498ab6b3d1993e8c2f119c1e032b7db27cc5f" gracePeriod=30 Dec 13 07:29:54 crc kubenswrapper[4644]: I1213 07:29:54.907636 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.047581 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-config-data-custom\") pod \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.047620 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-etc-machine-id\") pod \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.047736 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q5cwp\" (UniqueName: \"kubernetes.io/projected/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-kube-api-access-q5cwp\") pod \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.047762 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-combined-ca-bundle\") pod \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.047813 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-scripts\") pod \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.047870 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-logs\") pod \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " Dec 13 07:29:55 crc 
kubenswrapper[4644]: I1213 07:29:55.047899 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-config-data\") pod \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\" (UID: \"9dff1202-a70d-4fa8-89b6-0c8ba66790b1\") " Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.051116 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-logs" (OuterVolumeSpecName: "logs") pod "9dff1202-a70d-4fa8-89b6-0c8ba66790b1" (UID: "9dff1202-a70d-4fa8-89b6-0c8ba66790b1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.051189 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9dff1202-a70d-4fa8-89b6-0c8ba66790b1" (UID: "9dff1202-a70d-4fa8-89b6-0c8ba66790b1"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.054528 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9dff1202-a70d-4fa8-89b6-0c8ba66790b1" (UID: "9dff1202-a70d-4fa8-89b6-0c8ba66790b1"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.054572 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-scripts" (OuterVolumeSpecName: "scripts") pod "9dff1202-a70d-4fa8-89b6-0c8ba66790b1" (UID: "9dff1202-a70d-4fa8-89b6-0c8ba66790b1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.056981 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-kube-api-access-q5cwp" (OuterVolumeSpecName: "kube-api-access-q5cwp") pod "9dff1202-a70d-4fa8-89b6-0c8ba66790b1" (UID: "9dff1202-a70d-4fa8-89b6-0c8ba66790b1"). InnerVolumeSpecName "kube-api-access-q5cwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.077137 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9dff1202-a70d-4fa8-89b6-0c8ba66790b1" (UID: "9dff1202-a70d-4fa8-89b6-0c8ba66790b1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.100884 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-config-data" (OuterVolumeSpecName: "config-data") pod "9dff1202-a70d-4fa8-89b6-0c8ba66790b1" (UID: "9dff1202-a70d-4fa8-89b6-0c8ba66790b1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.150908 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q5cwp\" (UniqueName: \"kubernetes.io/projected/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-kube-api-access-q5cwp\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.150934 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.150961 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.150971 4644 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-logs\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.150979 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.150987 4644 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.150995 4644 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9dff1202-a70d-4fa8-89b6-0c8ba66790b1-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.322714 4644 generic.go:334] "Generic (PLEG): container finished" podID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerID="c2b239327faaa7cba97565ba270ded26adfb4e3686407790983e9f8fddc11507" exitCode=0 Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.322743 4644 generic.go:334] "Generic (PLEG): container finished" podID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerID="df1112b11e8df28beec36049f33498ab6b3d1993e8c2f119c1e032b7db27cc5f" exitCode=2 Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.322752 4644 generic.go:334] "Generic (PLEG): container finished" podID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerID="6b28384b35a84f36ef44c38fb5854e57d4286eb11c3508fb01aa72f6006fb757" exitCode=0 Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.322802 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be90da58-f6f5-4c6d-9f0a-1ddd56500d66","Type":"ContainerDied","Data":"c2b239327faaa7cba97565ba270ded26adfb4e3686407790983e9f8fddc11507"} Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.322828 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be90da58-f6f5-4c6d-9f0a-1ddd56500d66","Type":"ContainerDied","Data":"df1112b11e8df28beec36049f33498ab6b3d1993e8c2f119c1e032b7db27cc5f"} Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.322838 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"be90da58-f6f5-4c6d-9f0a-1ddd56500d66","Type":"ContainerDied","Data":"6b28384b35a84f36ef44c38fb5854e57d4286eb11c3508fb01aa72f6006fb757"} Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.326463 4644 generic.go:334] "Generic (PLEG): container finished" podID="9dff1202-a70d-4fa8-89b6-0c8ba66790b1" containerID="853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74" exitCode=0 Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.326490 4644 generic.go:334] "Generic (PLEG): container finished" podID="9dff1202-a70d-4fa8-89b6-0c8ba66790b1" containerID="83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305" exitCode=143 Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.326521 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"9dff1202-a70d-4fa8-89b6-0c8ba66790b1","Type":"ContainerDied","Data":"853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74"} Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.326537 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.326555 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"9dff1202-a70d-4fa8-89b6-0c8ba66790b1","Type":"ContainerDied","Data":"83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305"} Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.326566 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"9dff1202-a70d-4fa8-89b6-0c8ba66790b1","Type":"ContainerDied","Data":"45e7e525e9c297739a85eabe98f6158943321bacf4c5f69c47ed795c9ed87bf1"} Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.326581 4644 scope.go:117] "RemoveContainer" containerID="853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.366255 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.375949 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-api-0"] Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.383801 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Dec 13 07:29:55 crc kubenswrapper[4644]: E1213 07:29:55.384178 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dff1202-a70d-4fa8-89b6-0c8ba66790b1" containerName="manila-api-log" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.384198 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dff1202-a70d-4fa8-89b6-0c8ba66790b1" containerName="manila-api-log" Dec 13 07:29:55 crc kubenswrapper[4644]: E1213 07:29:55.384215 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dff1202-a70d-4fa8-89b6-0c8ba66790b1" containerName="manila-api" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.384221 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dff1202-a70d-4fa8-89b6-0c8ba66790b1" containerName="manila-api" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.384459 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dff1202-a70d-4fa8-89b6-0c8ba66790b1" containerName="manila-api" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.384480 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dff1202-a70d-4fa8-89b6-0c8ba66790b1" containerName="manila-api-log" Dec 13 07:29:55 crc 
kubenswrapper[4644]: I1213 07:29:55.385394 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.389990 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-public-svc" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.390190 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.390310 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-internal-svc" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.392738 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.403141 4644 scope.go:117] "RemoveContainer" containerID="83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.429214 4644 scope.go:117] "RemoveContainer" containerID="853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74" Dec 13 07:29:55 crc kubenswrapper[4644]: E1213 07:29:55.429610 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74\": container with ID starting with 853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74 not found: ID does not exist" containerID="853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.429644 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74"} err="failed to get container status \"853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74\": rpc error: code = NotFound desc = could not find container \"853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74\": container with ID starting with 853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74 not found: ID does not exist" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.429666 4644 scope.go:117] "RemoveContainer" containerID="83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305" Dec 13 07:29:55 crc kubenswrapper[4644]: E1213 07:29:55.429885 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305\": container with ID starting with 83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305 not found: ID does not exist" containerID="83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.429907 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305"} err="failed to get container status \"83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305\": rpc error: code = NotFound desc = could not find container \"83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305\": container with ID starting with 83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305 not found: ID does not exist" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.429920 4644 scope.go:117] "RemoveContainer" 
containerID="853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.430209 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74"} err="failed to get container status \"853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74\": rpc error: code = NotFound desc = could not find container \"853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74\": container with ID starting with 853b9611c3da7636cd73ffe1c328e9375f4cbef3e3869968958ccb2a5e6b0b74 not found: ID does not exist" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.430230 4644 scope.go:117] "RemoveContainer" containerID="83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.430433 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305"} err="failed to get container status \"83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305\": rpc error: code = NotFound desc = could not find container \"83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305\": container with ID starting with 83ba45574b09212c33cbc31c705fbd2bc3faf184ab53d080b1dd18bdcff81305 not found: ID does not exist" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.456600 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-config-data-custom\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.456837 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.456987 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6564caaf-8916-4f38-9bfd-c70c46b28887-logs\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.457041 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6564caaf-8916-4f38-9bfd-c70c46b28887-etc-machine-id\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.457107 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-scripts\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.457222 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-config-data\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.457361 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-internal-tls-certs\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.457384 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqzfm\" (UniqueName: \"kubernetes.io/projected/6564caaf-8916-4f38-9bfd-c70c46b28887-kube-api-access-bqzfm\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.457407 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-public-tls-certs\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.560135 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-config-data\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.560425 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-internal-tls-certs\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.560559 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqzfm\" (UniqueName: \"kubernetes.io/projected/6564caaf-8916-4f38-9bfd-c70c46b28887-kube-api-access-bqzfm\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.560665 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-public-tls-certs\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.560759 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-config-data-custom\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.560918 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.561054 4644 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6564caaf-8916-4f38-9bfd-c70c46b28887-logs\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.561133 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6564caaf-8916-4f38-9bfd-c70c46b28887-etc-machine-id\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.561206 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-scripts\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.561309 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6564caaf-8916-4f38-9bfd-c70c46b28887-etc-machine-id\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.561309 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6564caaf-8916-4f38-9bfd-c70c46b28887-logs\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.564837 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-internal-tls-certs\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.567787 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-config-data-custom\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.568221 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-public-tls-certs\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.568247 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-config-data\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.570968 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-scripts\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.571933 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6564caaf-8916-4f38-9bfd-c70c46b28887-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.579150 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqzfm\" (UniqueName: \"kubernetes.io/projected/6564caaf-8916-4f38-9bfd-c70c46b28887-kube-api-access-bqzfm\") pod \"manila-api-0\" (UID: \"6564caaf-8916-4f38-9bfd-c70c46b28887\") " pod="openstack/manila-api-0" Dec 13 07:29:55 crc kubenswrapper[4644]: I1213 07:29:55.708204 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.168651 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.350541 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"6564caaf-8916-4f38-9bfd-c70c46b28887","Type":"ContainerStarted","Data":"5974bc1526490040d39e2f9f156144543a050bd3607e62dac280f06a571eb66b"} Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.357777 4644 generic.go:334] "Generic (PLEG): container finished" podID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerID="28ce5e978887acf34be2d4cacbc886c7a64466edfd473c97f732a1bb15811c53" exitCode=0 Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.357843 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be90da58-f6f5-4c6d-9f0a-1ddd56500d66","Type":"ContainerDied","Data":"28ce5e978887acf34be2d4cacbc886c7a64466edfd473c97f732a1bb15811c53"} Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.398697 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dff1202-a70d-4fa8-89b6-0c8ba66790b1" path="/var/lib/kubelet/pods/9dff1202-a70d-4fa8-89b6-0c8ba66790b1/volumes" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.697059 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.790363 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-run-httpd\") pod \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.790496 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btz42\" (UniqueName: \"kubernetes.io/projected/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-kube-api-access-btz42\") pod \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.790539 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-combined-ca-bundle\") pod \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.790560 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-sg-core-conf-yaml\") pod \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.790580 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-ceilometer-tls-certs\") pod \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.790614 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-scripts\") pod \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.790636 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-config-data\") pod \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.790720 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-log-httpd\") pod \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\" (UID: \"be90da58-f6f5-4c6d-9f0a-1ddd56500d66\") " Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.791165 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "be90da58-f6f5-4c6d-9f0a-1ddd56500d66" (UID: "be90da58-f6f5-4c6d-9f0a-1ddd56500d66"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.791421 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "be90da58-f6f5-4c6d-9f0a-1ddd56500d66" (UID: "be90da58-f6f5-4c6d-9f0a-1ddd56500d66"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.791527 4644 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.794528 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-scripts" (OuterVolumeSpecName: "scripts") pod "be90da58-f6f5-4c6d-9f0a-1ddd56500d66" (UID: "be90da58-f6f5-4c6d-9f0a-1ddd56500d66"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.796359 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-kube-api-access-btz42" (OuterVolumeSpecName: "kube-api-access-btz42") pod "be90da58-f6f5-4c6d-9f0a-1ddd56500d66" (UID: "be90da58-f6f5-4c6d-9f0a-1ddd56500d66"). InnerVolumeSpecName "kube-api-access-btz42". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.823803 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "be90da58-f6f5-4c6d-9f0a-1ddd56500d66" (UID: "be90da58-f6f5-4c6d-9f0a-1ddd56500d66"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.830512 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "be90da58-f6f5-4c6d-9f0a-1ddd56500d66" (UID: "be90da58-f6f5-4c6d-9f0a-1ddd56500d66"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.846662 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "be90da58-f6f5-4c6d-9f0a-1ddd56500d66" (UID: "be90da58-f6f5-4c6d-9f0a-1ddd56500d66"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.877014 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-config-data" (OuterVolumeSpecName: "config-data") pod "be90da58-f6f5-4c6d-9f0a-1ddd56500d66" (UID: "be90da58-f6f5-4c6d-9f0a-1ddd56500d66"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.893261 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.893289 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.893299 4644 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.893309 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btz42\" (UniqueName: \"kubernetes.io/projected/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-kube-api-access-btz42\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.893318 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.893328 4644 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:56 crc kubenswrapper[4644]: I1213 07:29:56.893336 4644 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/be90da58-f6f5-4c6d-9f0a-1ddd56500d66-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.375833 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"6564caaf-8916-4f38-9bfd-c70c46b28887","Type":"ContainerStarted","Data":"5a2f816e0f1559ac2c886a2fd835113d252041a5eec990c152b0e7c4941c1cf9"} Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.376419 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.376633 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"6564caaf-8916-4f38-9bfd-c70c46b28887","Type":"ContainerStarted","Data":"be7365bcfb60e24addaa46942bc045641fba0e1bdc1f8bdb460e02bbeb1f156d"} Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.380043 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"be90da58-f6f5-4c6d-9f0a-1ddd56500d66","Type":"ContainerDied","Data":"02d992f6979288bcd86acf9f15ee22b5390b6f84624744080a99b4c25f43efb1"} Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.380104 4644 scope.go:117] "RemoveContainer" containerID="c2b239327faaa7cba97565ba270ded26adfb4e3686407790983e9f8fddc11507" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.380135 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.403024 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=2.403006166 podStartE2EDuration="2.403006166s" podCreationTimestamp="2025-12-13 07:29:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:29:57.396463193 +0000 UTC m=+2659.611414027" watchObservedRunningTime="2025-12-13 07:29:57.403006166 +0000 UTC m=+2659.617957019" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.423326 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.435525 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.445056 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:29:57 crc kubenswrapper[4644]: E1213 07:29:57.445582 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerName="ceilometer-notification-agent" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.445632 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerName="ceilometer-notification-agent" Dec 13 07:29:57 crc kubenswrapper[4644]: E1213 07:29:57.445663 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerName="proxy-httpd" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.445670 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerName="proxy-httpd" Dec 13 07:29:57 crc kubenswrapper[4644]: E1213 07:29:57.445708 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerName="ceilometer-central-agent" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.445715 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerName="ceilometer-central-agent" Dec 13 07:29:57 crc kubenswrapper[4644]: E1213 07:29:57.445726 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerName="sg-core" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.445731 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerName="sg-core" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.445922 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerName="ceilometer-notification-agent" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.445938 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerName="proxy-httpd" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.445945 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerName="sg-core" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.445961 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" containerName="ceilometer-central-agent" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.448032 4644 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.449810 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.449884 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.450056 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.451349 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.608753 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-log-httpd\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.609055 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.609171 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.609335 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22m4p\" (UniqueName: \"kubernetes.io/projected/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-kube-api-access-22m4p\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.609489 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-scripts\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.609617 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-config-data\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.609758 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-run-httpd\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.609818 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.712511 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-log-httpd\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.712620 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.712675 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.712870 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22m4p\" (UniqueName: \"kubernetes.io/projected/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-kube-api-access-22m4p\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.712979 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-scripts\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.713033 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-log-httpd\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.713119 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-config-data\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.713165 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-run-httpd\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.713218 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.713906 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-run-httpd\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.718805 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.718820 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-scripts\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.718859 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-config-data\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.719577 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.720115 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.727904 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22m4p\" (UniqueName: \"kubernetes.io/projected/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-kube-api-access-22m4p\") pod \"ceilometer-0\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") " pod="openstack/ceilometer-0" Dec 13 07:29:57 crc kubenswrapper[4644]: I1213 07:29:57.802148 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:29:58 crc kubenswrapper[4644]: I1213 07:29:58.401899 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be90da58-f6f5-4c6d-9f0a-1ddd56500d66" path="/var/lib/kubelet/pods/be90da58-f6f5-4c6d-9f0a-1ddd56500d66/volumes" Dec 13 07:29:59 crc kubenswrapper[4644]: I1213 07:29:59.551472 4644 scope.go:117] "RemoveContainer" containerID="df1112b11e8df28beec36049f33498ab6b3d1993e8c2f119c1e032b7db27cc5f" Dec 13 07:29:59 crc kubenswrapper[4644]: I1213 07:29:59.593154 4644 scope.go:117] "RemoveContainer" containerID="28ce5e978887acf34be2d4cacbc886c7a64466edfd473c97f732a1bb15811c53" Dec 13 07:29:59 crc kubenswrapper[4644]: I1213 07:29:59.682751 4644 scope.go:117] "RemoveContainer" containerID="6b28384b35a84f36ef44c38fb5854e57d4286eb11c3508fb01aa72f6006fb757" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:29:59.999895 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:30:00 crc kubenswrapper[4644]: W1213 07:30:00.012805 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36f6c0c4_95b7_4edf_9054_c4d4dc671ae7.slice/crio-b32d41deaef1ca26365f2e5df88f39939b6c2030b8ea375221c9448f42e3c1fd WatchSource:0}: Error finding container b32d41deaef1ca26365f2e5df88f39939b6c2030b8ea375221c9448f42e3c1fd: Status 404 returned error can't find the container with id b32d41deaef1ca26365f2e5df88f39939b6c2030b8ea375221c9448f42e3c1fd Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.127434 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f"] Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.130428 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.134978 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.135579 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.152842 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f"] Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.277127 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpf7t\" (UniqueName: \"kubernetes.io/projected/1024461e-3f2a-4de2-83be-c0899d822cce-kube-api-access-wpf7t\") pod \"collect-profiles-29426850-6266f\" (UID: \"1024461e-3f2a-4de2-83be-c0899d822cce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.277209 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1024461e-3f2a-4de2-83be-c0899d822cce-secret-volume\") pod \"collect-profiles-29426850-6266f\" (UID: \"1024461e-3f2a-4de2-83be-c0899d822cce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.277617 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1024461e-3f2a-4de2-83be-c0899d822cce-config-volume\") pod \"collect-profiles-29426850-6266f\" (UID: \"1024461e-3f2a-4de2-83be-c0899d822cce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.379810 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpf7t\" (UniqueName: \"kubernetes.io/projected/1024461e-3f2a-4de2-83be-c0899d822cce-kube-api-access-wpf7t\") pod \"collect-profiles-29426850-6266f\" (UID: \"1024461e-3f2a-4de2-83be-c0899d822cce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.380086 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1024461e-3f2a-4de2-83be-c0899d822cce-secret-volume\") pod \"collect-profiles-29426850-6266f\" (UID: \"1024461e-3f2a-4de2-83be-c0899d822cce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.380217 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1024461e-3f2a-4de2-83be-c0899d822cce-config-volume\") pod \"collect-profiles-29426850-6266f\" (UID: \"1024461e-3f2a-4de2-83be-c0899d822cce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.380979 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1024461e-3f2a-4de2-83be-c0899d822cce-config-volume\") pod 
\"collect-profiles-29426850-6266f\" (UID: \"1024461e-3f2a-4de2-83be-c0899d822cce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.385340 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1024461e-3f2a-4de2-83be-c0899d822cce-secret-volume\") pod \"collect-profiles-29426850-6266f\" (UID: \"1024461e-3f2a-4de2-83be-c0899d822cce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.393220 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpf7t\" (UniqueName: \"kubernetes.io/projected/1024461e-3f2a-4de2-83be-c0899d822cce-kube-api-access-wpf7t\") pod \"collect-profiles-29426850-6266f\" (UID: \"1024461e-3f2a-4de2-83be-c0899d822cce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.407881 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"ac722a47-a17b-43ff-93ee-c240f65592cc","Type":"ContainerStarted","Data":"cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7"} Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.407940 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"ac722a47-a17b-43ff-93ee-c240f65592cc","Type":"ContainerStarted","Data":"ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987"} Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.413897 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7","Type":"ContainerStarted","Data":"b32d41deaef1ca26365f2e5df88f39939b6c2030b8ea375221c9448f42e3c1fd"} Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.439223 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=2.298667172 podStartE2EDuration="10.439210431s" podCreationTimestamp="2025-12-13 07:29:50 +0000 UTC" firstStartedPulling="2025-12-13 07:29:51.455766685 +0000 UTC m=+2653.670717518" lastFinishedPulling="2025-12-13 07:29:59.596309943 +0000 UTC m=+2661.811260777" observedRunningTime="2025-12-13 07:30:00.431766505 +0000 UTC m=+2662.646717348" watchObservedRunningTime="2025-12-13 07:30:00.439210431 +0000 UTC m=+2662.654161264" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.458173 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.806215 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.831393 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.875009 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f"] Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.886731 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67d475fdcf-drmz6" Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.949219 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5dfc84b95f-9lh9x"] Dec 13 07:30:00 crc kubenswrapper[4644]: I1213 07:30:00.950584 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" podUID="c6b6b66f-5b04-4c2e-8c8c-bb8c21198597" containerName="dnsmasq-dns" containerID="cri-o://8acb6f988f074775bfa1ec565084c6b72d32422160eb795fd2ab12f4dcda841d" gracePeriod=10 Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.390258 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.443670 4644 generic.go:334] "Generic (PLEG): container finished" podID="c6b6b66f-5b04-4c2e-8c8c-bb8c21198597" containerID="8acb6f988f074775bfa1ec565084c6b72d32422160eb795fd2ab12f4dcda841d" exitCode=0 Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.443754 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" event={"ID":"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597","Type":"ContainerDied","Data":"8acb6f988f074775bfa1ec565084c6b72d32422160eb795fd2ab12f4dcda841d"} Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.443776 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" event={"ID":"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597","Type":"ContainerDied","Data":"d1fdebdb9213a5cf6153a9ad35edce34a828f7c1bb2bd03068e2ecf0e91979af"} Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.443785 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5dfc84b95f-9lh9x" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.443792 4644 scope.go:117] "RemoveContainer" containerID="8acb6f988f074775bfa1ec565084c6b72d32422160eb795fd2ab12f4dcda841d" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.448661 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7","Type":"ContainerStarted","Data":"eeb5fbb63d3af77219a3583eb45f12ca803e63d9d47a230dd5787d55f86055d8"} Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.450773 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" event={"ID":"1024461e-3f2a-4de2-83be-c0899d822cce","Type":"ContainerStarted","Data":"f2d5131365c4612ec025c63f7d0dc7794025aee338a414cfd5db7caed8be4d11"} Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.450930 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" event={"ID":"1024461e-3f2a-4de2-83be-c0899d822cce","Type":"ContainerStarted","Data":"3424a4b322dfa8415d311748791ca106d4c6a30e4f0bbcd0b694a8e1cd603807"} Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.476999 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" podStartSLOduration=1.476981158 podStartE2EDuration="1.476981158s" podCreationTimestamp="2025-12-13 07:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:30:01.467942473 +0000 UTC m=+2663.682893307" watchObservedRunningTime="2025-12-13 07:30:01.476981158 +0000 UTC m=+2663.691931990" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.486231 4644 scope.go:117] "RemoveContainer" containerID="595e3290c9d6ff654cbdf4ed4cf50210226f490aa0b270da249150d33c5063a6" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.504808 4644 scope.go:117] "RemoveContainer" containerID="8acb6f988f074775bfa1ec565084c6b72d32422160eb795fd2ab12f4dcda841d" Dec 13 07:30:01 crc kubenswrapper[4644]: E1213 07:30:01.505252 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8acb6f988f074775bfa1ec565084c6b72d32422160eb795fd2ab12f4dcda841d\": container with ID starting with 8acb6f988f074775bfa1ec565084c6b72d32422160eb795fd2ab12f4dcda841d not found: ID does not exist" containerID="8acb6f988f074775bfa1ec565084c6b72d32422160eb795fd2ab12f4dcda841d" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.505331 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8acb6f988f074775bfa1ec565084c6b72d32422160eb795fd2ab12f4dcda841d"} err="failed to get container status \"8acb6f988f074775bfa1ec565084c6b72d32422160eb795fd2ab12f4dcda841d\": rpc error: code = NotFound desc = could not find container \"8acb6f988f074775bfa1ec565084c6b72d32422160eb795fd2ab12f4dcda841d\": container with ID starting with 8acb6f988f074775bfa1ec565084c6b72d32422160eb795fd2ab12f4dcda841d not found: ID does not exist" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.505382 4644 scope.go:117] "RemoveContainer" containerID="595e3290c9d6ff654cbdf4ed4cf50210226f490aa0b270da249150d33c5063a6" Dec 13 07:30:01 crc kubenswrapper[4644]: E1213 07:30:01.505778 4644 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"595e3290c9d6ff654cbdf4ed4cf50210226f490aa0b270da249150d33c5063a6\": container with ID starting with 595e3290c9d6ff654cbdf4ed4cf50210226f490aa0b270da249150d33c5063a6 not found: ID does not exist" containerID="595e3290c9d6ff654cbdf4ed4cf50210226f490aa0b270da249150d33c5063a6" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.505867 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"595e3290c9d6ff654cbdf4ed4cf50210226f490aa0b270da249150d33c5063a6"} err="failed to get container status \"595e3290c9d6ff654cbdf4ed4cf50210226f490aa0b270da249150d33c5063a6\": rpc error: code = NotFound desc = could not find container \"595e3290c9d6ff654cbdf4ed4cf50210226f490aa0b270da249150d33c5063a6\": container with ID starting with 595e3290c9d6ff654cbdf4ed4cf50210226f490aa0b270da249150d33c5063a6 not found: ID does not exist" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.530415 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwlf2\" (UniqueName: \"kubernetes.io/projected/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-kube-api-access-lwlf2\") pod \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.530582 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-config\") pod \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.530740 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-ovsdbserver-nb\") pod \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.530942 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-openstack-edpm-ipam\") pod \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.531010 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-ovsdbserver-sb\") pod \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.531161 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-dns-svc\") pod \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\" (UID: \"c6b6b66f-5b04-4c2e-8c8c-bb8c21198597\") " Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.535966 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-kube-api-access-lwlf2" (OuterVolumeSpecName: "kube-api-access-lwlf2") pod "c6b6b66f-5b04-4c2e-8c8c-bb8c21198597" (UID: "c6b6b66f-5b04-4c2e-8c8c-bb8c21198597"). InnerVolumeSpecName "kube-api-access-lwlf2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.538030 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwlf2\" (UniqueName: \"kubernetes.io/projected/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-kube-api-access-lwlf2\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.572134 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c6b6b66f-5b04-4c2e-8c8c-bb8c21198597" (UID: "c6b6b66f-5b04-4c2e-8c8c-bb8c21198597"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.572146 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "c6b6b66f-5b04-4c2e-8c8c-bb8c21198597" (UID: "c6b6b66f-5b04-4c2e-8c8c-bb8c21198597"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.577407 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c6b6b66f-5b04-4c2e-8c8c-bb8c21198597" (UID: "c6b6b66f-5b04-4c2e-8c8c-bb8c21198597"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.577515 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-config" (OuterVolumeSpecName: "config") pod "c6b6b66f-5b04-4c2e-8c8c-bb8c21198597" (UID: "c6b6b66f-5b04-4c2e-8c8c-bb8c21198597"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.586921 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c6b6b66f-5b04-4c2e-8c8c-bb8c21198597" (UID: "c6b6b66f-5b04-4c2e-8c8c-bb8c21198597"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.645748 4644 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.645846 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.645860 4644 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.645871 4644 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.645900 4644 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.776023 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5dfc84b95f-9lh9x"] Dec 13 07:30:01 crc kubenswrapper[4644]: I1213 07:30:01.783609 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5dfc84b95f-9lh9x"] Dec 13 07:30:02 crc kubenswrapper[4644]: I1213 07:30:02.398469 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6b6b66f-5b04-4c2e-8c8c-bb8c21198597" path="/var/lib/kubelet/pods/c6b6b66f-5b04-4c2e-8c8c-bb8c21198597/volumes" Dec 13 07:30:02 crc kubenswrapper[4644]: I1213 07:30:02.462855 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7","Type":"ContainerStarted","Data":"e9d273fc319662b954b92d6af5bc260e3b81503e3b52e02140d18cb14171d87c"} Dec 13 07:30:02 crc kubenswrapper[4644]: I1213 07:30:02.465113 4644 generic.go:334] "Generic (PLEG): container finished" podID="1024461e-3f2a-4de2-83be-c0899d822cce" containerID="f2d5131365c4612ec025c63f7d0dc7794025aee338a414cfd5db7caed8be4d11" exitCode=0 Dec 13 07:30:02 crc kubenswrapper[4644]: I1213 07:30:02.465162 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" event={"ID":"1024461e-3f2a-4de2-83be-c0899d822cce","Type":"ContainerDied","Data":"f2d5131365c4612ec025c63f7d0dc7794025aee338a414cfd5db7caed8be4d11"} Dec 13 07:30:03 crc kubenswrapper[4644]: I1213 07:30:03.516566 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7","Type":"ContainerStarted","Data":"e24e853afc8bfb844a90985934299b44d3d1e8468f508310d700eab2d410f69b"} Dec 13 07:30:03 crc kubenswrapper[4644]: I1213 07:30:03.627538 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:30:04 crc kubenswrapper[4644]: I1213 07:30:03.852844 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" Dec 13 07:30:04 crc kubenswrapper[4644]: I1213 07:30:04.017462 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1024461e-3f2a-4de2-83be-c0899d822cce-config-volume\") pod \"1024461e-3f2a-4de2-83be-c0899d822cce\" (UID: \"1024461e-3f2a-4de2-83be-c0899d822cce\") " Dec 13 07:30:04 crc kubenswrapper[4644]: I1213 07:30:04.017569 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpf7t\" (UniqueName: \"kubernetes.io/projected/1024461e-3f2a-4de2-83be-c0899d822cce-kube-api-access-wpf7t\") pod \"1024461e-3f2a-4de2-83be-c0899d822cce\" (UID: \"1024461e-3f2a-4de2-83be-c0899d822cce\") " Dec 13 07:30:04 crc kubenswrapper[4644]: I1213 07:30:04.017617 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1024461e-3f2a-4de2-83be-c0899d822cce-secret-volume\") pod \"1024461e-3f2a-4de2-83be-c0899d822cce\" (UID: \"1024461e-3f2a-4de2-83be-c0899d822cce\") " Dec 13 07:30:04 crc kubenswrapper[4644]: I1213 07:30:04.018399 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1024461e-3f2a-4de2-83be-c0899d822cce-config-volume" (OuterVolumeSpecName: "config-volume") pod "1024461e-3f2a-4de2-83be-c0899d822cce" (UID: "1024461e-3f2a-4de2-83be-c0899d822cce"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:30:04 crc kubenswrapper[4644]: I1213 07:30:04.025364 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1024461e-3f2a-4de2-83be-c0899d822cce-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1024461e-3f2a-4de2-83be-c0899d822cce" (UID: "1024461e-3f2a-4de2-83be-c0899d822cce"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:30:04 crc kubenswrapper[4644]: I1213 07:30:04.031570 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1024461e-3f2a-4de2-83be-c0899d822cce-kube-api-access-wpf7t" (OuterVolumeSpecName: "kube-api-access-wpf7t") pod "1024461e-3f2a-4de2-83be-c0899d822cce" (UID: "1024461e-3f2a-4de2-83be-c0899d822cce"). InnerVolumeSpecName "kube-api-access-wpf7t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:30:04 crc kubenswrapper[4644]: I1213 07:30:04.121831 4644 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1024461e-3f2a-4de2-83be-c0899d822cce-config-volume\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:04 crc kubenswrapper[4644]: I1213 07:30:04.121865 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpf7t\" (UniqueName: \"kubernetes.io/projected/1024461e-3f2a-4de2-83be-c0899d822cce-kube-api-access-wpf7t\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:04 crc kubenswrapper[4644]: I1213 07:30:04.121879 4644 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1024461e-3f2a-4de2-83be-c0899d822cce-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:04 crc kubenswrapper[4644]: I1213 07:30:04.527354 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" event={"ID":"1024461e-3f2a-4de2-83be-c0899d822cce","Type":"ContainerDied","Data":"3424a4b322dfa8415d311748791ca106d4c6a30e4f0bbcd0b694a8e1cd603807"} Dec 13 07:30:04 crc kubenswrapper[4644]: I1213 07:30:04.528850 4644 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3424a4b322dfa8415d311748791ca106d4c6a30e4f0bbcd0b694a8e1cd603807" Dec 13 07:30:04 crc kubenswrapper[4644]: I1213 07:30:04.528707 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426850-6266f" Dec 13 07:30:04 crc kubenswrapper[4644]: I1213 07:30:04.560831 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g"] Dec 13 07:30:04 crc kubenswrapper[4644]: I1213 07:30:04.568390 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426805-wg25g"] Dec 13 07:30:05 crc kubenswrapper[4644]: I1213 07:30:05.538963 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7","Type":"ContainerStarted","Data":"bc39298bde6d469acaca8fcdfecbb00170bb1422eb784797e2cb1cfcb49f00ee"} Dec 13 07:30:05 crc kubenswrapper[4644]: I1213 07:30:05.539291 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 13 07:30:05 crc kubenswrapper[4644]: I1213 07:30:05.539078 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerName="ceilometer-central-agent" containerID="cri-o://eeb5fbb63d3af77219a3583eb45f12ca803e63d9d47a230dd5787d55f86055d8" gracePeriod=30 Dec 13 07:30:05 crc kubenswrapper[4644]: I1213 07:30:05.539395 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerName="proxy-httpd" containerID="cri-o://bc39298bde6d469acaca8fcdfecbb00170bb1422eb784797e2cb1cfcb49f00ee" gracePeriod=30 Dec 13 07:30:05 crc kubenswrapper[4644]: I1213 07:30:05.539473 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerName="ceilometer-notification-agent" containerID="cri-o://e9d273fc319662b954b92d6af5bc260e3b81503e3b52e02140d18cb14171d87c" gracePeriod=30 
Dec 13 07:30:05 crc kubenswrapper[4644]: I1213 07:30:05.539497 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerName="sg-core" containerID="cri-o://e24e853afc8bfb844a90985934299b44d3d1e8468f508310d700eab2d410f69b" gracePeriod=30
Dec 13 07:30:05 crc kubenswrapper[4644]: I1213 07:30:05.561426 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.299637421 podStartE2EDuration="8.561406099s" podCreationTimestamp="2025-12-13 07:29:57 +0000 UTC" firstStartedPulling="2025-12-13 07:30:00.015589982 +0000 UTC m=+2662.230540815" lastFinishedPulling="2025-12-13 07:30:05.27735866 +0000 UTC m=+2667.492309493" observedRunningTime="2025-12-13 07:30:05.55528512 +0000 UTC m=+2667.770235953" watchObservedRunningTime="2025-12-13 07:30:05.561406099 +0000 UTC m=+2667.776356931"
Dec 13 07:30:06 crc kubenswrapper[4644]: I1213 07:30:06.401483 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8aab518-1735-49f5-98af-68d01e096132" path="/var/lib/kubelet/pods/f8aab518-1735-49f5-98af-68d01e096132/volumes"
Dec 13 07:30:06 crc kubenswrapper[4644]: I1213 07:30:06.549377 4644 generic.go:334] "Generic (PLEG): container finished" podID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerID="e24e853afc8bfb844a90985934299b44d3d1e8468f508310d700eab2d410f69b" exitCode=2
Dec 13 07:30:06 crc kubenswrapper[4644]: I1213 07:30:06.549407 4644 generic.go:334] "Generic (PLEG): container finished" podID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerID="e9d273fc319662b954b92d6af5bc260e3b81503e3b52e02140d18cb14171d87c" exitCode=0
Dec 13 07:30:06 crc kubenswrapper[4644]: I1213 07:30:06.549415 4644 generic.go:334] "Generic (PLEG): container finished" podID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerID="eeb5fbb63d3af77219a3583eb45f12ca803e63d9d47a230dd5787d55f86055d8" exitCode=0
Dec 13 07:30:06 crc kubenswrapper[4644]: I1213 07:30:06.549408 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7","Type":"ContainerDied","Data":"e24e853afc8bfb844a90985934299b44d3d1e8468f508310d700eab2d410f69b"}
Dec 13 07:30:06 crc kubenswrapper[4644]: I1213 07:30:06.549468 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7","Type":"ContainerDied","Data":"e9d273fc319662b954b92d6af5bc260e3b81503e3b52e02140d18cb14171d87c"}
Dec 13 07:30:06 crc kubenswrapper[4644]: I1213 07:30:06.549483 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7","Type":"ContainerDied","Data":"eeb5fbb63d3af77219a3583eb45f12ca803e63d9d47a230dd5787d55f86055d8"}
Dec 13 07:30:12 crc kubenswrapper[4644]: I1213 07:30:12.036165 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0"
Dec 13 07:30:12 crc kubenswrapper[4644]: I1213 07:30:12.068873 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0"
Dec 13 07:30:12 crc kubenswrapper[4644]: I1213 07:30:12.084738 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"]
Dec 13 07:30:12 crc kubenswrapper[4644]: I1213 07:30:12.119181 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"]
Dec 13 07:30:12 crc kubenswrapper[4644]: I1213 07:30:12.598281 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="ac722a47-a17b-43ff-93ee-c240f65592cc" containerName="manila-share" containerID="cri-o://ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987" gracePeriod=30
Dec 13 07:30:12 crc kubenswrapper[4644]: I1213 07:30:12.598620 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="ac722a47-a17b-43ff-93ee-c240f65592cc" containerName="probe" containerID="cri-o://cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7" gracePeriod=30
Dec 13 07:30:12 crc kubenswrapper[4644]: I1213 07:30:12.598985 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="292f2b2c-6449-4899-83fd-7c5dd3632696" containerName="manila-scheduler" containerID="cri-o://372bbb832fe45cdb676bc352cb1796e775e87dbaa148ad0f4a2c34ab4e71abc2" gracePeriod=30
Dec 13 07:30:12 crc kubenswrapper[4644]: I1213 07:30:12.599047 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="292f2b2c-6449-4899-83fd-7c5dd3632696" containerName="probe" containerID="cri-o://ec29595ae8aad706b582735ef13a4ebabe6471ee15626b930afe073d8d29ac79" gracePeriod=30
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.483914 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0"
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.529713 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-config-data\") pod \"ac722a47-a17b-43ff-93ee-c240f65592cc\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") "
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.529876 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-scripts\") pod \"ac722a47-a17b-43ff-93ee-c240f65592cc\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") "
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.530029 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-config-data-custom\") pod \"ac722a47-a17b-43ff-93ee-c240f65592cc\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") "
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.530106 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6sfkt\" (UniqueName: \"kubernetes.io/projected/ac722a47-a17b-43ff-93ee-c240f65592cc-kube-api-access-6sfkt\") pod \"ac722a47-a17b-43ff-93ee-c240f65592cc\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") "
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.530249 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ac722a47-a17b-43ff-93ee-c240f65592cc-etc-machine-id\") pod \"ac722a47-a17b-43ff-93ee-c240f65592cc\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") "
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.530402 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ac722a47-a17b-43ff-93ee-c240f65592cc-ceph\") pod \"ac722a47-a17b-43ff-93ee-c240f65592cc\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") "
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.530530 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-combined-ca-bundle\") pod \"ac722a47-a17b-43ff-93ee-c240f65592cc\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") "
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.530397 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac722a47-a17b-43ff-93ee-c240f65592cc-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ac722a47-a17b-43ff-93ee-c240f65592cc" (UID: "ac722a47-a17b-43ff-93ee-c240f65592cc"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.530581 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/ac722a47-a17b-43ff-93ee-c240f65592cc-var-lib-manila\") pod \"ac722a47-a17b-43ff-93ee-c240f65592cc\" (UID: \"ac722a47-a17b-43ff-93ee-c240f65592cc\") "
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.530715 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac722a47-a17b-43ff-93ee-c240f65592cc-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "ac722a47-a17b-43ff-93ee-c240f65592cc" (UID: "ac722a47-a17b-43ff-93ee-c240f65592cc"). InnerVolumeSpecName "var-lib-manila". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.531360 4644 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ac722a47-a17b-43ff-93ee-c240f65592cc-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.531383 4644 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/ac722a47-a17b-43ff-93ee-c240f65592cc-var-lib-manila\") on node \"crc\" DevicePath \"\""
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.536467 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac722a47-a17b-43ff-93ee-c240f65592cc-kube-api-access-6sfkt" (OuterVolumeSpecName: "kube-api-access-6sfkt") pod "ac722a47-a17b-43ff-93ee-c240f65592cc" (UID: "ac722a47-a17b-43ff-93ee-c240f65592cc"). InnerVolumeSpecName "kube-api-access-6sfkt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.537602 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-scripts" (OuterVolumeSpecName: "scripts") pod "ac722a47-a17b-43ff-93ee-c240f65592cc" (UID: "ac722a47-a17b-43ff-93ee-c240f65592cc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.537651 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ac722a47-a17b-43ff-93ee-c240f65592cc" (UID: "ac722a47-a17b-43ff-93ee-c240f65592cc"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.539346 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac722a47-a17b-43ff-93ee-c240f65592cc-ceph" (OuterVolumeSpecName: "ceph") pod "ac722a47-a17b-43ff-93ee-c240f65592cc" (UID: "ac722a47-a17b-43ff-93ee-c240f65592cc"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.577220 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ac722a47-a17b-43ff-93ee-c240f65592cc" (UID: "ac722a47-a17b-43ff-93ee-c240f65592cc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.607123 4644 generic.go:334] "Generic (PLEG): container finished" podID="ac722a47-a17b-43ff-93ee-c240f65592cc" containerID="cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7" exitCode=0
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.607157 4644 generic.go:334] "Generic (PLEG): container finished" podID="ac722a47-a17b-43ff-93ee-c240f65592cc" containerID="ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987" exitCode=1
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.607183 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0"
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.607217 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"ac722a47-a17b-43ff-93ee-c240f65592cc","Type":"ContainerDied","Data":"cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7"}
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.607272 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"ac722a47-a17b-43ff-93ee-c240f65592cc","Type":"ContainerDied","Data":"ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987"}
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.607284 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"ac722a47-a17b-43ff-93ee-c240f65592cc","Type":"ContainerDied","Data":"6d93304fb30f55a1e12d36a3e0e2242992bf9c7c0859c553e2c1185aca65f66d"}
Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.607300 4644 scope.go:117] "RemoveContainer" containerID="cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7"
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.611174 4644 generic.go:334] "Generic (PLEG): container finished" podID="292f2b2c-6449-4899-83fd-7c5dd3632696" containerID="ec29595ae8aad706b582735ef13a4ebabe6471ee15626b930afe073d8d29ac79" exitCode=0 Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.611209 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"292f2b2c-6449-4899-83fd-7c5dd3632696","Type":"ContainerDied","Data":"ec29595ae8aad706b582735ef13a4ebabe6471ee15626b930afe073d8d29ac79"} Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.629728 4644 scope.go:117] "RemoveContainer" containerID="ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.634994 4644 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ac722a47-a17b-43ff-93ee-c240f65592cc-ceph\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.635021 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.635034 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.635047 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.635057 4644 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac722a47-a17b-43ff-93ee-c240f65592cc-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.635066 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6sfkt\" (UniqueName: \"kubernetes.io/projected/ac722a47-a17b-43ff-93ee-c240f65592cc-kube-api-access-6sfkt\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.649957 4644 scope.go:117] "RemoveContainer" containerID="cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7" Dec 13 07:30:13 crc kubenswrapper[4644]: E1213 07:30:13.650728 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7\": container with ID starting with cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7 not found: ID does not exist" containerID="cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.650771 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7"} err="failed to get container status \"cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7\": rpc error: code = NotFound desc = could not find container \"cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7\": container with ID starting with 
cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7 not found: ID does not exist" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.650797 4644 scope.go:117] "RemoveContainer" containerID="ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987" Dec 13 07:30:13 crc kubenswrapper[4644]: E1213 07:30:13.651221 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987\": container with ID starting with ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987 not found: ID does not exist" containerID="ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.651360 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987"} err="failed to get container status \"ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987\": rpc error: code = NotFound desc = could not find container \"ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987\": container with ID starting with ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987 not found: ID does not exist" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.651469 4644 scope.go:117] "RemoveContainer" containerID="cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.651824 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7"} err="failed to get container status \"cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7\": rpc error: code = NotFound desc = could not find container \"cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7\": container with ID starting with cf104e2b9d024a93cb43aa09a966a9c7f1b8f9a028df0f236ef3ce6a73aa8da7 not found: ID does not exist" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.651846 4644 scope.go:117] "RemoveContainer" containerID="ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.652075 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987"} err="failed to get container status \"ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987\": rpc error: code = NotFound desc = could not find container \"ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987\": container with ID starting with ff97adbc42b1d41ffc7c6371325220960190fc81e6397cf58a562ae2b2c3b987 not found: ID does not exist" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.938754 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.952103 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-share-share1-0"] Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.965547 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Dec 13 07:30:13 crc kubenswrapper[4644]: E1213 07:30:13.966121 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1024461e-3f2a-4de2-83be-c0899d822cce" containerName="collect-profiles" Dec 
13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.966146 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="1024461e-3f2a-4de2-83be-c0899d822cce" containerName="collect-profiles" Dec 13 07:30:13 crc kubenswrapper[4644]: E1213 07:30:13.966177 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac722a47-a17b-43ff-93ee-c240f65592cc" containerName="probe" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.966185 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac722a47-a17b-43ff-93ee-c240f65592cc" containerName="probe" Dec 13 07:30:13 crc kubenswrapper[4644]: E1213 07:30:13.966199 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac722a47-a17b-43ff-93ee-c240f65592cc" containerName="manila-share" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.966205 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac722a47-a17b-43ff-93ee-c240f65592cc" containerName="manila-share" Dec 13 07:30:13 crc kubenswrapper[4644]: E1213 07:30:13.966220 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6b6b66f-5b04-4c2e-8c8c-bb8c21198597" containerName="dnsmasq-dns" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.966226 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6b6b66f-5b04-4c2e-8c8c-bb8c21198597" containerName="dnsmasq-dns" Dec 13 07:30:13 crc kubenswrapper[4644]: E1213 07:30:13.966238 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6b6b66f-5b04-4c2e-8c8c-bb8c21198597" containerName="init" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.966244 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6b6b66f-5b04-4c2e-8c8c-bb8c21198597" containerName="init" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.966497 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac722a47-a17b-43ff-93ee-c240f65592cc" containerName="manila-share" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.966523 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="1024461e-3f2a-4de2-83be-c0899d822cce" containerName="collect-profiles" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.966534 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac722a47-a17b-43ff-93ee-c240f65592cc" containerName="probe" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.966553 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6b6b66f-5b04-4c2e-8c8c-bb8c21198597" containerName="dnsmasq-dns" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.967741 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.969365 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Dec 13 07:30:13 crc kubenswrapper[4644]: I1213 07:30:13.981485 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.043373 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-config-data\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0" Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.043488 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-scripts\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0" Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.043535 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0" Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.043592 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0" Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.043629 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0" Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.043685 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-ceph\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0" Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.043813 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45f85\" (UniqueName: \"kubernetes.io/projected/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-kube-api-access-45f85\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0" Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.043847 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0" Dec 13 07:30:14 crc 
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.146992 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-scripts\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.147098 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.147134 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.147224 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.147253 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.147265 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-ceph\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.147378 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.147686 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45f85\" (UniqueName: \"kubernetes.io/projected/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-kube-api-access-45f85\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.147755 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.147909 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-config-data\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.152657 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.153090 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.153827 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-ceph\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.155525 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-scripts\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.164414 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45f85\" (UniqueName: \"kubernetes.io/projected/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-kube-api-access-45f85\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.169592 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc-config-data\") pod \"manila-share-share1-0\" (UID: \"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc\") " pod="openstack/manila-share-share1-0"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.289417 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.397837 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac722a47-a17b-43ff-93ee-c240f65592cc" path="/var/lib/kubelet/pods/ac722a47-a17b-43ff-93ee-c240f65592cc/volumes"
Dec 13 07:30:14 crc kubenswrapper[4644]: I1213 07:30:14.749334 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"]
Dec 13 07:30:14 crc kubenswrapper[4644]: W1213 07:30:14.752635 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f44f3c6_c98c_4c26_bd47_9a0bf1cf2edc.slice/crio-f706c5325a70658236a9357dea124d35e11c583a34460ef1384d71d93ea8cbda WatchSource:0}: Error finding container f706c5325a70658236a9357dea124d35e11c583a34460ef1384d71d93ea8cbda: Status 404 returned error can't find the container with id f706c5325a70658236a9357dea124d35e11c583a34460ef1384d71d93ea8cbda
Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.563874 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0"
Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.579629 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-scripts\") pod \"292f2b2c-6449-4899-83fd-7c5dd3632696\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") "
Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.579824 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-config-data\") pod \"292f2b2c-6449-4899-83fd-7c5dd3632696\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") "
Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.580134 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-config-data-custom\") pod \"292f2b2c-6449-4899-83fd-7c5dd3632696\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") "
Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.580240 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8v4q\" (UniqueName: \"kubernetes.io/projected/292f2b2c-6449-4899-83fd-7c5dd3632696-kube-api-access-l8v4q\") pod \"292f2b2c-6449-4899-83fd-7c5dd3632696\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") "
Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.580776 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/292f2b2c-6449-4899-83fd-7c5dd3632696-etc-machine-id\") pod \"292f2b2c-6449-4899-83fd-7c5dd3632696\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") "
Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.580816 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-combined-ca-bundle\") pod \"292f2b2c-6449-4899-83fd-7c5dd3632696\" (UID: \"292f2b2c-6449-4899-83fd-7c5dd3632696\") "
Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.581074 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/292f2b2c-6449-4899-83fd-7c5dd3632696-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "292f2b2c-6449-4899-83fd-7c5dd3632696" (UID: "292f2b2c-6449-4899-83fd-7c5dd3632696"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.581823 4644 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/292f2b2c-6449-4899-83fd-7c5dd3632696-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.593235 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/292f2b2c-6449-4899-83fd-7c5dd3632696-kube-api-access-l8v4q" (OuterVolumeSpecName: "kube-api-access-l8v4q") pod "292f2b2c-6449-4899-83fd-7c5dd3632696" (UID: "292f2b2c-6449-4899-83fd-7c5dd3632696"). InnerVolumeSpecName "kube-api-access-l8v4q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.596298 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "292f2b2c-6449-4899-83fd-7c5dd3632696" (UID: "292f2b2c-6449-4899-83fd-7c5dd3632696"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.603211 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-scripts" (OuterVolumeSpecName: "scripts") pod "292f2b2c-6449-4899-83fd-7c5dd3632696" (UID: "292f2b2c-6449-4899-83fd-7c5dd3632696"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.633318 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc","Type":"ContainerStarted","Data":"ffd7745b739715a2e40929cf2f8236da58756c21b0725f87abd99e25b9de6463"} Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.633365 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc","Type":"ContainerStarted","Data":"9fef4111878cbfd3a643ddac2b46ed9ba4408278f018afd34ef69a7377f9f74e"} Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.633377 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc","Type":"ContainerStarted","Data":"f706c5325a70658236a9357dea124d35e11c583a34460ef1384d71d93ea8cbda"} Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.635129 4644 generic.go:334] "Generic (PLEG): container finished" podID="292f2b2c-6449-4899-83fd-7c5dd3632696" containerID="372bbb832fe45cdb676bc352cb1796e775e87dbaa148ad0f4a2c34ab4e71abc2" exitCode=0 Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.635265 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"292f2b2c-6449-4899-83fd-7c5dd3632696","Type":"ContainerDied","Data":"372bbb832fe45cdb676bc352cb1796e775e87dbaa148ad0f4a2c34ab4e71abc2"} Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.635322 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"292f2b2c-6449-4899-83fd-7c5dd3632696","Type":"ContainerDied","Data":"d270226d3996ca1297ebeb8245739526c7d710ccaabb7af33e7dfa0faf4c1bcd"} Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.635319 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.635350 4644 scope.go:117] "RemoveContainer" containerID="ec29595ae8aad706b582735ef13a4ebabe6471ee15626b930afe073d8d29ac79" Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.661698 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=2.661680799 podStartE2EDuration="2.661680799s" podCreationTimestamp="2025-12-13 07:30:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:30:15.655929796 +0000 UTC m=+2677.870880630" watchObservedRunningTime="2025-12-13 07:30:15.661680799 +0000 UTC m=+2677.876631632" Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.672936 4644 scope.go:117] "RemoveContainer" containerID="372bbb832fe45cdb676bc352cb1796e775e87dbaa148ad0f4a2c34ab4e71abc2" Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.678052 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-config-data" (OuterVolumeSpecName: "config-data") pod "292f2b2c-6449-4899-83fd-7c5dd3632696" (UID: "292f2b2c-6449-4899-83fd-7c5dd3632696"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.686310 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.686437 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-scripts\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.686495 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-config-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.686516 4644 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/292f2b2c-6449-4899-83fd-7c5dd3632696-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.686527 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8v4q\" (UniqueName: \"kubernetes.io/projected/292f2b2c-6449-4899-83fd-7c5dd3632696-kube-api-access-l8v4q\") on node \"crc\" DevicePath \"\"" Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.691112 4644 scope.go:117] "RemoveContainer" containerID="ec29595ae8aad706b582735ef13a4ebabe6471ee15626b930afe073d8d29ac79" Dec 13 07:30:15 crc kubenswrapper[4644]: E1213 07:30:15.691488 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec29595ae8aad706b582735ef13a4ebabe6471ee15626b930afe073d8d29ac79\": container with ID starting with ec29595ae8aad706b582735ef13a4ebabe6471ee15626b930afe073d8d29ac79 not found: ID does not exist" containerID="ec29595ae8aad706b582735ef13a4ebabe6471ee15626b930afe073d8d29ac79" Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.691537 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec29595ae8aad706b582735ef13a4ebabe6471ee15626b930afe073d8d29ac79"} err="failed to get container status \"ec29595ae8aad706b582735ef13a4ebabe6471ee15626b930afe073d8d29ac79\": rpc error: code = NotFound desc = could not find container \"ec29595ae8aad706b582735ef13a4ebabe6471ee15626b930afe073d8d29ac79\": container with ID starting with ec29595ae8aad706b582735ef13a4ebabe6471ee15626b930afe073d8d29ac79 not found: ID does not exist" Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.691559 4644 scope.go:117] "RemoveContainer" containerID="372bbb832fe45cdb676bc352cb1796e775e87dbaa148ad0f4a2c34ab4e71abc2" Dec 13 07:30:15 crc kubenswrapper[4644]: E1213 07:30:15.691868 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"372bbb832fe45cdb676bc352cb1796e775e87dbaa148ad0f4a2c34ab4e71abc2\": container with ID starting with 372bbb832fe45cdb676bc352cb1796e775e87dbaa148ad0f4a2c34ab4e71abc2 not found: ID does not exist" containerID="372bbb832fe45cdb676bc352cb1796e775e87dbaa148ad0f4a2c34ab4e71abc2" Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.691890 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"372bbb832fe45cdb676bc352cb1796e775e87dbaa148ad0f4a2c34ab4e71abc2"} err="failed to get container status 
\"372bbb832fe45cdb676bc352cb1796e775e87dbaa148ad0f4a2c34ab4e71abc2\": rpc error: code = NotFound desc = could not find container \"372bbb832fe45cdb676bc352cb1796e775e87dbaa148ad0f4a2c34ab4e71abc2\": container with ID starting with 372bbb832fe45cdb676bc352cb1796e775e87dbaa148ad0f4a2c34ab4e71abc2 not found: ID does not exist" Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.983336 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Dec 13 07:30:15 crc kubenswrapper[4644]: I1213 07:30:15.992011 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-scheduler-0"] Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.012202 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Dec 13 07:30:16 crc kubenswrapper[4644]: E1213 07:30:16.012592 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="292f2b2c-6449-4899-83fd-7c5dd3632696" containerName="manila-scheduler" Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.012612 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="292f2b2c-6449-4899-83fd-7c5dd3632696" containerName="manila-scheduler" Dec 13 07:30:16 crc kubenswrapper[4644]: E1213 07:30:16.012624 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="292f2b2c-6449-4899-83fd-7c5dd3632696" containerName="probe" Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.012631 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="292f2b2c-6449-4899-83fd-7c5dd3632696" containerName="probe" Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.012855 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="292f2b2c-6449-4899-83fd-7c5dd3632696" containerName="manila-scheduler" Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.012876 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="292f2b2c-6449-4899-83fd-7c5dd3632696" containerName="probe" Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.013950 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.016361 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.034584 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.097149 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c69c54f-9157-4483-9cd6-3fe4a4035b75-config-data\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0" Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.097300 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c69c54f-9157-4483-9cd6-3fe4a4035b75-scripts\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0" Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.097499 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c69c54f-9157-4483-9cd6-3fe4a4035b75-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0" Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.097588 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2c69c54f-9157-4483-9cd6-3fe4a4035b75-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0" Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.097713 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c69c54f-9157-4483-9cd6-3fe4a4035b75-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0" Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.097870 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnq5s\" (UniqueName: \"kubernetes.io/projected/2c69c54f-9157-4483-9cd6-3fe4a4035b75-kube-api-access-wnq5s\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0" Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.199834 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c69c54f-9157-4483-9cd6-3fe4a4035b75-config-data\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0" Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.199878 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c69c54f-9157-4483-9cd6-3fe4a4035b75-scripts\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0" Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.199943 4644 reconciler_common.go:218] "operationExecutor.MountVolume 
Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.199943 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c69c54f-9157-4483-9cd6-3fe4a4035b75-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0"
Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.199962 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2c69c54f-9157-4483-9cd6-3fe4a4035b75-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0"
Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.199999 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c69c54f-9157-4483-9cd6-3fe4a4035b75-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0"
Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.200032 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnq5s\" (UniqueName: \"kubernetes.io/projected/2c69c54f-9157-4483-9cd6-3fe4a4035b75-kube-api-access-wnq5s\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0"
Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.202290 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c69c54f-9157-4483-9cd6-3fe4a4035b75-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0"
Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.210658 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c69c54f-9157-4483-9cd6-3fe4a4035b75-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0"
Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.211034 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c69c54f-9157-4483-9cd6-3fe4a4035b75-scripts\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0"
Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.211292 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2c69c54f-9157-4483-9cd6-3fe4a4035b75-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0"
Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.212081 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c69c54f-9157-4483-9cd6-3fe4a4035b75-config-data\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0"
Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.217854 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnq5s\" (UniqueName: \"kubernetes.io/projected/2c69c54f-9157-4483-9cd6-3fe4a4035b75-kube-api-access-wnq5s\") pod \"manila-scheduler-0\" (UID: \"2c69c54f-9157-4483-9cd6-3fe4a4035b75\") " pod="openstack/manila-scheduler-0"
Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.374693 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0"
Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.402041 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="292f2b2c-6449-4899-83fd-7c5dd3632696" path="/var/lib/kubelet/pods/292f2b2c-6449-4899-83fd-7c5dd3632696/volumes"
Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.767898 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"]
Dec 13 07:30:16 crc kubenswrapper[4644]: W1213 07:30:16.776530 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c69c54f_9157_4483_9cd6_3fe4a4035b75.slice/crio-74e76947c539db26c63794a942bffdea71d3c86bd5fb028bced688b633f87376 WatchSource:0}: Error finding container 74e76947c539db26c63794a942bffdea71d3c86bd5fb028bced688b633f87376: Status 404 returned error can't find the container with id 74e76947c539db26c63794a942bffdea71d3c86bd5fb028bced688b633f87376
Dec 13 07:30:16 crc kubenswrapper[4644]: I1213 07:30:16.840754 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0"
Dec 13 07:30:17 crc kubenswrapper[4644]: I1213 07:30:17.665859 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"2c69c54f-9157-4483-9cd6-3fe4a4035b75","Type":"ContainerStarted","Data":"65212fae37f1f1c8672b05fd2d5bd8ea3c45dfe32e3786ef5922b236390a4edd"}
Dec 13 07:30:17 crc kubenswrapper[4644]: I1213 07:30:17.666418 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"2c69c54f-9157-4483-9cd6-3fe4a4035b75","Type":"ContainerStarted","Data":"72fa7fd47ad5c8461833a7ed86e5d7e52b75e0e83b20a4929ec82a43db01ee39"}
Dec 13 07:30:17 crc kubenswrapper[4644]: I1213 07:30:17.666431 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"2c69c54f-9157-4483-9cd6-3fe4a4035b75","Type":"ContainerStarted","Data":"74e76947c539db26c63794a942bffdea71d3c86bd5fb028bced688b633f87376"}
Dec 13 07:30:17 crc kubenswrapper[4644]: I1213 07:30:17.689082 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=2.689068997 podStartE2EDuration="2.689068997s" podCreationTimestamp="2025-12-13 07:30:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:30:17.681649557 +0000 UTC m=+2679.896600390" watchObservedRunningTime="2025-12-13 07:30:17.689068997 +0000 UTC m=+2679.904019830"
Dec 13 07:30:24 crc kubenswrapper[4644]: I1213 07:30:24.290149 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0"
Dec 13 07:30:26 crc kubenswrapper[4644]: I1213 07:30:26.375830 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0"
Dec 13 07:30:27 crc kubenswrapper[4644]: I1213 07:30:27.810560 4644 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503"
Dec 13 07:30:35 crc kubenswrapper[4644]: I1213 07:30:35.598611 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0"
Dec 13 07:30:35 crc kubenswrapper[4644]: I1213 07:30:35.809757 4644 generic.go:334] "Generic (PLEG): container finished" podID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerID="bc39298bde6d469acaca8fcdfecbb00170bb1422eb784797e2cb1cfcb49f00ee" exitCode=137
Dec 13 07:30:35 crc kubenswrapper[4644]: I1213 07:30:35.809829 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7","Type":"ContainerDied","Data":"bc39298bde6d469acaca8fcdfecbb00170bb1422eb784797e2cb1cfcb49f00ee"}
Dec 13 07:30:35 crc kubenswrapper[4644]: I1213 07:30:35.891836 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 13 07:30:35 crc kubenswrapper[4644]: I1213 07:30:35.998972 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-ceilometer-tls-certs\") pod \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") "
Dec 13 07:30:35 crc kubenswrapper[4644]: I1213 07:30:35.999048 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-log-httpd\") pod \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") "
Dec 13 07:30:35 crc kubenswrapper[4644]: I1213 07:30:35.999074 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-config-data\") pod \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") "
Dec 13 07:30:35 crc kubenswrapper[4644]: I1213 07:30:35.999152 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-run-httpd\") pod \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") "
Dec 13 07:30:35 crc kubenswrapper[4644]: I1213 07:30:35.999176 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-scripts\") pod \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") "
Dec 13 07:30:35 crc kubenswrapper[4644]: I1213 07:30:35.999240 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-combined-ca-bundle\") pod \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") "
Dec 13 07:30:35 crc kubenswrapper[4644]: I1213 07:30:35.999415 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-sg-core-conf-yaml\") pod \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") "
Dec 13 07:30:35 crc kubenswrapper[4644]: I1213 07:30:35.999529 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22m4p\" (UniqueName: \"kubernetes.io/projected/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-kube-api-access-22m4p\") pod \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\" (UID: \"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7\") "
Dec 13 07:30:35 crc kubenswrapper[4644]: I1213 07:30:35.999766 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" (UID: "36f6c0c4-95b7-4edf-9054-c4d4dc671ae7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.000015 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" (UID: "36f6c0c4-95b7-4edf-9054-c4d4dc671ae7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.000481 4644 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.000503 4644 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.006160 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-scripts" (OuterVolumeSpecName: "scripts") pod "36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" (UID: "36f6c0c4-95b7-4edf-9054-c4d4dc671ae7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.006805 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-kube-api-access-22m4p" (OuterVolumeSpecName: "kube-api-access-22m4p") pod "36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" (UID: "36f6c0c4-95b7-4edf-9054-c4d4dc671ae7"). InnerVolumeSpecName "kube-api-access-22m4p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.027287 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" (UID: "36f6c0c4-95b7-4edf-9054-c4d4dc671ae7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.042871 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" (UID: "36f6c0c4-95b7-4edf-9054-c4d4dc671ae7"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.069872 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" (UID: "36f6c0c4-95b7-4edf-9054-c4d4dc671ae7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.084263 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-config-data" (OuterVolumeSpecName: "config-data") pod "36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" (UID: "36f6c0c4-95b7-4edf-9054-c4d4dc671ae7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.103109 4644 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-scripts\") on node \"crc\" DevicePath \"\""
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.103146 4644 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.103160 4644 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.103171 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22m4p\" (UniqueName: \"kubernetes.io/projected/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-kube-api-access-22m4p\") on node \"crc\" DevicePath \"\""
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.103181 4644 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.103193 4644 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7-config-data\") on node \"crc\" DevicePath \"\""
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.821101 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36f6c0c4-95b7-4edf-9054-c4d4dc671ae7","Type":"ContainerDied","Data":"b32d41deaef1ca26365f2e5df88f39939b6c2030b8ea375221c9448f42e3c1fd"}
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.821151 4644 scope.go:117] "RemoveContainer" containerID="bc39298bde6d469acaca8fcdfecbb00170bb1422eb784797e2cb1cfcb49f00ee"
Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.821249 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.846111 4644 scope.go:117] "RemoveContainer" containerID="e24e853afc8bfb844a90985934299b44d3d1e8468f508310d700eab2d410f69b" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.846535 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.860570 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.871341 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:30:36 crc kubenswrapper[4644]: E1213 07:30:36.871836 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerName="ceilometer-central-agent" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.871858 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerName="ceilometer-central-agent" Dec 13 07:30:36 crc kubenswrapper[4644]: E1213 07:30:36.871883 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerName="ceilometer-notification-agent" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.871889 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerName="ceilometer-notification-agent" Dec 13 07:30:36 crc kubenswrapper[4644]: E1213 07:30:36.871918 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerName="sg-core" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.871924 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerName="sg-core" Dec 13 07:30:36 crc kubenswrapper[4644]: E1213 07:30:36.871938 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerName="proxy-httpd" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.871944 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerName="proxy-httpd" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.872139 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerName="sg-core" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.872161 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerName="ceilometer-notification-agent" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.872177 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerName="proxy-httpd" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.872191 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" containerName="ceilometer-central-agent" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.873855 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.873870 4644 scope.go:117] "RemoveContainer" containerID="e9d273fc319662b954b92d6af5bc260e3b81503e3b52e02140d18cb14171d87c" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.877488 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.877871 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.880937 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.884232 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.903504 4644 scope.go:117] "RemoveContainer" containerID="eeb5fbb63d3af77219a3583eb45f12ca803e63d9d47a230dd5787d55f86055d8" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.918410 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/17956119-da29-422c-b808-c0894731283e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.918534 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72t4s\" (UniqueName: \"kubernetes.io/projected/17956119-da29-422c-b808-c0894731283e-kube-api-access-72t4s\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.918632 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17956119-da29-422c-b808-c0894731283e-config-data\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.918664 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17956119-da29-422c-b808-c0894731283e-log-httpd\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.918716 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17956119-da29-422c-b808-c0894731283e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.918743 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17956119-da29-422c-b808-c0894731283e-scripts\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.918771 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/17956119-da29-422c-b808-c0894731283e-run-httpd\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:36 crc kubenswrapper[4644]: I1213 07:30:36.918897 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/17956119-da29-422c-b808-c0894731283e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.021061 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/17956119-da29-422c-b808-c0894731283e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.021117 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/17956119-da29-422c-b808-c0894731283e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.021147 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72t4s\" (UniqueName: \"kubernetes.io/projected/17956119-da29-422c-b808-c0894731283e-kube-api-access-72t4s\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.021219 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17956119-da29-422c-b808-c0894731283e-config-data\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.021248 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17956119-da29-422c-b808-c0894731283e-log-httpd\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.021288 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17956119-da29-422c-b808-c0894731283e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.021831 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17956119-da29-422c-b808-c0894731283e-log-httpd\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.021927 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17956119-da29-422c-b808-c0894731283e-scripts\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.022358 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/17956119-da29-422c-b808-c0894731283e-run-httpd\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.022645 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17956119-da29-422c-b808-c0894731283e-run-httpd\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.027245 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/17956119-da29-422c-b808-c0894731283e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.027577 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/17956119-da29-422c-b808-c0894731283e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.027670 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17956119-da29-422c-b808-c0894731283e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.028012 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17956119-da29-422c-b808-c0894731283e-scripts\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.028868 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17956119-da29-422c-b808-c0894731283e-config-data\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.049102 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72t4s\" (UniqueName: \"kubernetes.io/projected/17956119-da29-422c-b808-c0894731283e-kube-api-access-72t4s\") pod \"ceilometer-0\" (UID: \"17956119-da29-422c-b808-c0894731283e\") " pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.194656 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.615829 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.664581 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Dec 13 07:30:37 crc kubenswrapper[4644]: I1213 07:30:37.833898 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"17956119-da29-422c-b808-c0894731283e","Type":"ContainerStarted","Data":"29ff77e649416dee108cd47f16dd7c4d5be05790c4aab932cf7397b4af4f70e1"} Dec 13 07:30:38 crc kubenswrapper[4644]: I1213 07:30:38.401863 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36f6c0c4-95b7-4edf-9054-c4d4dc671ae7" path="/var/lib/kubelet/pods/36f6c0c4-95b7-4edf-9054-c4d4dc671ae7/volumes" Dec 13 07:30:38 crc kubenswrapper[4644]: I1213 07:30:38.844276 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"17956119-da29-422c-b808-c0894731283e","Type":"ContainerStarted","Data":"0b8f110fae7818c96bd8988c1401eb46dbfc7401c3702ed5e1913f4285363ecf"} Dec 13 07:30:39 crc kubenswrapper[4644]: I1213 07:30:39.854459 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"17956119-da29-422c-b808-c0894731283e","Type":"ContainerStarted","Data":"b65af71a64a1dbdca266b5bdfc137cf3c4664f009c6e9cfd7000a2546977e2fc"} Dec 13 07:30:40 crc kubenswrapper[4644]: I1213 07:30:40.866106 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"17956119-da29-422c-b808-c0894731283e","Type":"ContainerStarted","Data":"ac9aad4398da170dc9ce8e0a6a0fa0d48d0924ef11418e6ca59c08fa4bbe0a2b"} Dec 13 07:30:41 crc kubenswrapper[4644]: I1213 07:30:41.875155 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"17956119-da29-422c-b808-c0894731283e","Type":"ContainerStarted","Data":"3944ba8c856c2873ae950355d4dc18699c9064a9030364dc979a5adce1d2dcd2"} Dec 13 07:30:41 crc kubenswrapper[4644]: I1213 07:30:41.876139 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 13 07:30:41 crc kubenswrapper[4644]: I1213 07:30:41.895808 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.901263913 podStartE2EDuration="5.895790028s" podCreationTimestamp="2025-12-13 07:30:36 +0000 UTC" firstStartedPulling="2025-12-13 07:30:37.623895072 +0000 UTC m=+2699.838845905" lastFinishedPulling="2025-12-13 07:30:41.618421187 +0000 UTC m=+2703.833372020" observedRunningTime="2025-12-13 07:30:41.889696631 +0000 UTC m=+2704.104647465" watchObservedRunningTime="2025-12-13 07:30:41.895790028 +0000 UTC m=+2704.110740861" Dec 13 07:30:43 crc kubenswrapper[4644]: I1213 07:30:43.428402 4644 scope.go:117] "RemoveContainer" containerID="556da2544417e0c0d8dc849dbaf7bf0d3727f9f9fb668868c86ebbe5af570a7c" Dec 13 07:30:59 crc kubenswrapper[4644]: I1213 07:30:59.802142 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-f84mf"] Dec 13 07:30:59 crc kubenswrapper[4644]: I1213 07:30:59.804683 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f84mf" Dec 13 07:30:59 crc kubenswrapper[4644]: I1213 07:30:59.808855 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f84mf"] Dec 13 07:30:59 crc kubenswrapper[4644]: I1213 07:30:59.922838 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/519d3214-4add-4ce6-a572-05cd07ea0107-catalog-content\") pod \"redhat-operators-f84mf\" (UID: \"519d3214-4add-4ce6-a572-05cd07ea0107\") " pod="openshift-marketplace/redhat-operators-f84mf" Dec 13 07:30:59 crc kubenswrapper[4644]: I1213 07:30:59.923103 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6ptf\" (UniqueName: \"kubernetes.io/projected/519d3214-4add-4ce6-a572-05cd07ea0107-kube-api-access-k6ptf\") pod \"redhat-operators-f84mf\" (UID: \"519d3214-4add-4ce6-a572-05cd07ea0107\") " pod="openshift-marketplace/redhat-operators-f84mf" Dec 13 07:30:59 crc kubenswrapper[4644]: I1213 07:30:59.923132 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/519d3214-4add-4ce6-a572-05cd07ea0107-utilities\") pod \"redhat-operators-f84mf\" (UID: \"519d3214-4add-4ce6-a572-05cd07ea0107\") " pod="openshift-marketplace/redhat-operators-f84mf" Dec 13 07:31:00 crc kubenswrapper[4644]: I1213 07:31:00.025330 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6ptf\" (UniqueName: \"kubernetes.io/projected/519d3214-4add-4ce6-a572-05cd07ea0107-kube-api-access-k6ptf\") pod \"redhat-operators-f84mf\" (UID: \"519d3214-4add-4ce6-a572-05cd07ea0107\") " pod="openshift-marketplace/redhat-operators-f84mf" Dec 13 07:31:00 crc kubenswrapper[4644]: I1213 07:31:00.025383 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/519d3214-4add-4ce6-a572-05cd07ea0107-utilities\") pod \"redhat-operators-f84mf\" (UID: \"519d3214-4add-4ce6-a572-05cd07ea0107\") " pod="openshift-marketplace/redhat-operators-f84mf" Dec 13 07:31:00 crc kubenswrapper[4644]: I1213 07:31:00.025474 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/519d3214-4add-4ce6-a572-05cd07ea0107-catalog-content\") pod \"redhat-operators-f84mf\" (UID: \"519d3214-4add-4ce6-a572-05cd07ea0107\") " pod="openshift-marketplace/redhat-operators-f84mf" Dec 13 07:31:00 crc kubenswrapper[4644]: I1213 07:31:00.025932 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/519d3214-4add-4ce6-a572-05cd07ea0107-utilities\") pod \"redhat-operators-f84mf\" (UID: \"519d3214-4add-4ce6-a572-05cd07ea0107\") " pod="openshift-marketplace/redhat-operators-f84mf" Dec 13 07:31:00 crc kubenswrapper[4644]: I1213 07:31:00.025949 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/519d3214-4add-4ce6-a572-05cd07ea0107-catalog-content\") pod \"redhat-operators-f84mf\" (UID: \"519d3214-4add-4ce6-a572-05cd07ea0107\") " pod="openshift-marketplace/redhat-operators-f84mf" Dec 13 07:31:00 crc kubenswrapper[4644]: I1213 07:31:00.051132 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-k6ptf\" (UniqueName: \"kubernetes.io/projected/519d3214-4add-4ce6-a572-05cd07ea0107-kube-api-access-k6ptf\") pod \"redhat-operators-f84mf\" (UID: \"519d3214-4add-4ce6-a572-05cd07ea0107\") " pod="openshift-marketplace/redhat-operators-f84mf" Dec 13 07:31:00 crc kubenswrapper[4644]: I1213 07:31:00.121131 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f84mf" Dec 13 07:31:00 crc kubenswrapper[4644]: I1213 07:31:00.545737 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f84mf"] Dec 13 07:31:01 crc kubenswrapper[4644]: I1213 07:31:01.024042 4644 generic.go:334] "Generic (PLEG): container finished" podID="519d3214-4add-4ce6-a572-05cd07ea0107" containerID="aa5c9865662aa17c888cf68b8cf3063619cefc6e4c8707b9399daa4d422ca2fe" exitCode=0 Dec 13 07:31:01 crc kubenswrapper[4644]: I1213 07:31:01.024110 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f84mf" event={"ID":"519d3214-4add-4ce6-a572-05cd07ea0107","Type":"ContainerDied","Data":"aa5c9865662aa17c888cf68b8cf3063619cefc6e4c8707b9399daa4d422ca2fe"} Dec 13 07:31:01 crc kubenswrapper[4644]: I1213 07:31:01.024143 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f84mf" event={"ID":"519d3214-4add-4ce6-a572-05cd07ea0107","Type":"ContainerStarted","Data":"90da877ce8841cb8e7acac1905a5a340de42ffac0024a2a6491cc759795aae3a"} Dec 13 07:31:03 crc kubenswrapper[4644]: I1213 07:31:03.040560 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f84mf" event={"ID":"519d3214-4add-4ce6-a572-05cd07ea0107","Type":"ContainerStarted","Data":"cad8b9941bda028baf72e13a3a3d727f3e0df995e06b6f5aab1b3a243f9f559b"} Dec 13 07:31:04 crc kubenswrapper[4644]: I1213 07:31:04.052048 4644 generic.go:334] "Generic (PLEG): container finished" podID="519d3214-4add-4ce6-a572-05cd07ea0107" containerID="cad8b9941bda028baf72e13a3a3d727f3e0df995e06b6f5aab1b3a243f9f559b" exitCode=0 Dec 13 07:31:04 crc kubenswrapper[4644]: I1213 07:31:04.052134 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f84mf" event={"ID":"519d3214-4add-4ce6-a572-05cd07ea0107","Type":"ContainerDied","Data":"cad8b9941bda028baf72e13a3a3d727f3e0df995e06b6f5aab1b3a243f9f559b"} Dec 13 07:31:05 crc kubenswrapper[4644]: I1213 07:31:05.063151 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f84mf" event={"ID":"519d3214-4add-4ce6-a572-05cd07ea0107","Type":"ContainerStarted","Data":"816ac90dc88810eea5300b44892c3e65dbf238b9bb6c8d6d66fd13915c349f1d"} Dec 13 07:31:05 crc kubenswrapper[4644]: I1213 07:31:05.082833 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-f84mf" podStartSLOduration=2.538141311 podStartE2EDuration="6.08281175s" podCreationTimestamp="2025-12-13 07:30:59 +0000 UTC" firstStartedPulling="2025-12-13 07:31:01.025662413 +0000 UTC m=+2723.240613236" lastFinishedPulling="2025-12-13 07:31:04.570332842 +0000 UTC m=+2726.785283675" observedRunningTime="2025-12-13 07:31:05.080573322 +0000 UTC m=+2727.295524156" watchObservedRunningTime="2025-12-13 07:31:05.08281175 +0000 UTC m=+2727.297762583" Dec 13 07:31:05 crc kubenswrapper[4644]: I1213 07:31:05.184015 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-klxpd"] Dec 13 07:31:05 crc 
kubenswrapper[4644]: I1213 07:31:05.185833 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-klxpd" Dec 13 07:31:05 crc kubenswrapper[4644]: I1213 07:31:05.198906 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-klxpd"] Dec 13 07:31:05 crc kubenswrapper[4644]: I1213 07:31:05.348508 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/207e9f99-4eda-431a-831b-96bb485d3b0e-utilities\") pod \"redhat-marketplace-klxpd\" (UID: \"207e9f99-4eda-431a-831b-96bb485d3b0e\") " pod="openshift-marketplace/redhat-marketplace-klxpd" Dec 13 07:31:05 crc kubenswrapper[4644]: I1213 07:31:05.348724 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/207e9f99-4eda-431a-831b-96bb485d3b0e-catalog-content\") pod \"redhat-marketplace-klxpd\" (UID: \"207e9f99-4eda-431a-831b-96bb485d3b0e\") " pod="openshift-marketplace/redhat-marketplace-klxpd" Dec 13 07:31:05 crc kubenswrapper[4644]: I1213 07:31:05.348753 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfq7z\" (UniqueName: \"kubernetes.io/projected/207e9f99-4eda-431a-831b-96bb485d3b0e-kube-api-access-hfq7z\") pod \"redhat-marketplace-klxpd\" (UID: \"207e9f99-4eda-431a-831b-96bb485d3b0e\") " pod="openshift-marketplace/redhat-marketplace-klxpd" Dec 13 07:31:05 crc kubenswrapper[4644]: I1213 07:31:05.451054 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/207e9f99-4eda-431a-831b-96bb485d3b0e-catalog-content\") pod \"redhat-marketplace-klxpd\" (UID: \"207e9f99-4eda-431a-831b-96bb485d3b0e\") " pod="openshift-marketplace/redhat-marketplace-klxpd" Dec 13 07:31:05 crc kubenswrapper[4644]: I1213 07:31:05.451094 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfq7z\" (UniqueName: \"kubernetes.io/projected/207e9f99-4eda-431a-831b-96bb485d3b0e-kube-api-access-hfq7z\") pod \"redhat-marketplace-klxpd\" (UID: \"207e9f99-4eda-431a-831b-96bb485d3b0e\") " pod="openshift-marketplace/redhat-marketplace-klxpd" Dec 13 07:31:05 crc kubenswrapper[4644]: I1213 07:31:05.451184 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/207e9f99-4eda-431a-831b-96bb485d3b0e-utilities\") pod \"redhat-marketplace-klxpd\" (UID: \"207e9f99-4eda-431a-831b-96bb485d3b0e\") " pod="openshift-marketplace/redhat-marketplace-klxpd" Dec 13 07:31:05 crc kubenswrapper[4644]: I1213 07:31:05.451730 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/207e9f99-4eda-431a-831b-96bb485d3b0e-utilities\") pod \"redhat-marketplace-klxpd\" (UID: \"207e9f99-4eda-431a-831b-96bb485d3b0e\") " pod="openshift-marketplace/redhat-marketplace-klxpd" Dec 13 07:31:05 crc kubenswrapper[4644]: I1213 07:31:05.451750 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/207e9f99-4eda-431a-831b-96bb485d3b0e-catalog-content\") pod \"redhat-marketplace-klxpd\" (UID: \"207e9f99-4eda-431a-831b-96bb485d3b0e\") " pod="openshift-marketplace/redhat-marketplace-klxpd" Dec 13 07:31:05 crc 
kubenswrapper[4644]: I1213 07:31:05.483062 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfq7z\" (UniqueName: \"kubernetes.io/projected/207e9f99-4eda-431a-831b-96bb485d3b0e-kube-api-access-hfq7z\") pod \"redhat-marketplace-klxpd\" (UID: \"207e9f99-4eda-431a-831b-96bb485d3b0e\") " pod="openshift-marketplace/redhat-marketplace-klxpd" Dec 13 07:31:05 crc kubenswrapper[4644]: I1213 07:31:05.501157 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-klxpd" Dec 13 07:31:06 crc kubenswrapper[4644]: I1213 07:31:06.034047 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-klxpd"] Dec 13 07:31:06 crc kubenswrapper[4644]: I1213 07:31:06.072476 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klxpd" event={"ID":"207e9f99-4eda-431a-831b-96bb485d3b0e","Type":"ContainerStarted","Data":"a0208320c15d35581f17f685a50dffce5c2224cf6b45b921abd9ba989acd835d"} Dec 13 07:31:07 crc kubenswrapper[4644]: I1213 07:31:07.081588 4644 generic.go:334] "Generic (PLEG): container finished" podID="207e9f99-4eda-431a-831b-96bb485d3b0e" containerID="c27e2321eec11a8886bb6ca6dd2891e0df35d13e8534b07d6f61dc37549f0ffb" exitCode=0 Dec 13 07:31:07 crc kubenswrapper[4644]: I1213 07:31:07.081658 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klxpd" event={"ID":"207e9f99-4eda-431a-831b-96bb485d3b0e","Type":"ContainerDied","Data":"c27e2321eec11a8886bb6ca6dd2891e0df35d13e8534b07d6f61dc37549f0ffb"} Dec 13 07:31:07 crc kubenswrapper[4644]: I1213 07:31:07.201707 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 13 07:31:08 crc kubenswrapper[4644]: I1213 07:31:08.092223 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klxpd" event={"ID":"207e9f99-4eda-431a-831b-96bb485d3b0e","Type":"ContainerStarted","Data":"e4e43b2058d02d55fe8503d9fb0e150f34a86a1b1bc6b4fa130d14f2377312e5"} Dec 13 07:31:09 crc kubenswrapper[4644]: I1213 07:31:09.103050 4644 generic.go:334] "Generic (PLEG): container finished" podID="207e9f99-4eda-431a-831b-96bb485d3b0e" containerID="e4e43b2058d02d55fe8503d9fb0e150f34a86a1b1bc6b4fa130d14f2377312e5" exitCode=0 Dec 13 07:31:09 crc kubenswrapper[4644]: I1213 07:31:09.103151 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klxpd" event={"ID":"207e9f99-4eda-431a-831b-96bb485d3b0e","Type":"ContainerDied","Data":"e4e43b2058d02d55fe8503d9fb0e150f34a86a1b1bc6b4fa130d14f2377312e5"} Dec 13 07:31:09 crc kubenswrapper[4644]: I1213 07:31:09.753457 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:31:09 crc kubenswrapper[4644]: I1213 07:31:09.753734 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:31:10 crc kubenswrapper[4644]: I1213 07:31:10.113462 4644 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klxpd" event={"ID":"207e9f99-4eda-431a-831b-96bb485d3b0e","Type":"ContainerStarted","Data":"5cc7fe0c46ab9c833a4f6385a83758484957b5e6cc94426edb591b5de8a98867"} Dec 13 07:31:10 crc kubenswrapper[4644]: I1213 07:31:10.121618 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-f84mf" Dec 13 07:31:10 crc kubenswrapper[4644]: I1213 07:31:10.121706 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-f84mf" Dec 13 07:31:10 crc kubenswrapper[4644]: I1213 07:31:10.133998 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-klxpd" podStartSLOduration=2.614904199 podStartE2EDuration="5.133980856s" podCreationTimestamp="2025-12-13 07:31:05 +0000 UTC" firstStartedPulling="2025-12-13 07:31:07.083501281 +0000 UTC m=+2729.298452113" lastFinishedPulling="2025-12-13 07:31:09.602577937 +0000 UTC m=+2731.817528770" observedRunningTime="2025-12-13 07:31:10.13121224 +0000 UTC m=+2732.346163264" watchObservedRunningTime="2025-12-13 07:31:10.133980856 +0000 UTC m=+2732.348931679" Dec 13 07:31:10 crc kubenswrapper[4644]: I1213 07:31:10.161712 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-f84mf" Dec 13 07:31:11 crc kubenswrapper[4644]: I1213 07:31:11.164798 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-f84mf" Dec 13 07:31:12 crc kubenswrapper[4644]: I1213 07:31:12.377941 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f84mf"] Dec 13 07:31:14 crc kubenswrapper[4644]: I1213 07:31:14.152111 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-f84mf" podUID="519d3214-4add-4ce6-a572-05cd07ea0107" containerName="registry-server" containerID="cri-o://816ac90dc88810eea5300b44892c3e65dbf238b9bb6c8d6d66fd13915c349f1d" gracePeriod=2 Dec 13 07:31:14 crc kubenswrapper[4644]: I1213 07:31:14.621234 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f84mf" Dec 13 07:31:14 crc kubenswrapper[4644]: I1213 07:31:14.756033 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/519d3214-4add-4ce6-a572-05cd07ea0107-catalog-content\") pod \"519d3214-4add-4ce6-a572-05cd07ea0107\" (UID: \"519d3214-4add-4ce6-a572-05cd07ea0107\") " Dec 13 07:31:14 crc kubenswrapper[4644]: I1213 07:31:14.756146 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6ptf\" (UniqueName: \"kubernetes.io/projected/519d3214-4add-4ce6-a572-05cd07ea0107-kube-api-access-k6ptf\") pod \"519d3214-4add-4ce6-a572-05cd07ea0107\" (UID: \"519d3214-4add-4ce6-a572-05cd07ea0107\") " Dec 13 07:31:14 crc kubenswrapper[4644]: I1213 07:31:14.756253 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/519d3214-4add-4ce6-a572-05cd07ea0107-utilities\") pod \"519d3214-4add-4ce6-a572-05cd07ea0107\" (UID: \"519d3214-4add-4ce6-a572-05cd07ea0107\") " Dec 13 07:31:14 crc kubenswrapper[4644]: I1213 07:31:14.757030 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/519d3214-4add-4ce6-a572-05cd07ea0107-utilities" (OuterVolumeSpecName: "utilities") pod "519d3214-4add-4ce6-a572-05cd07ea0107" (UID: "519d3214-4add-4ce6-a572-05cd07ea0107"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:31:14 crc kubenswrapper[4644]: I1213 07:31:14.768714 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/519d3214-4add-4ce6-a572-05cd07ea0107-kube-api-access-k6ptf" (OuterVolumeSpecName: "kube-api-access-k6ptf") pod "519d3214-4add-4ce6-a572-05cd07ea0107" (UID: "519d3214-4add-4ce6-a572-05cd07ea0107"). InnerVolumeSpecName "kube-api-access-k6ptf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:31:14 crc kubenswrapper[4644]: I1213 07:31:14.839089 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/519d3214-4add-4ce6-a572-05cd07ea0107-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "519d3214-4add-4ce6-a572-05cd07ea0107" (UID: "519d3214-4add-4ce6-a572-05cd07ea0107"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:31:14 crc kubenswrapper[4644]: I1213 07:31:14.860719 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/519d3214-4add-4ce6-a572-05cd07ea0107-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:31:14 crc kubenswrapper[4644]: I1213 07:31:14.860748 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6ptf\" (UniqueName: \"kubernetes.io/projected/519d3214-4add-4ce6-a572-05cd07ea0107-kube-api-access-k6ptf\") on node \"crc\" DevicePath \"\"" Dec 13 07:31:14 crc kubenswrapper[4644]: I1213 07:31:14.860763 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/519d3214-4add-4ce6-a572-05cd07ea0107-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.161642 4644 generic.go:334] "Generic (PLEG): container finished" podID="519d3214-4add-4ce6-a572-05cd07ea0107" containerID="816ac90dc88810eea5300b44892c3e65dbf238b9bb6c8d6d66fd13915c349f1d" exitCode=0 Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.161681 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f84mf" event={"ID":"519d3214-4add-4ce6-a572-05cd07ea0107","Type":"ContainerDied","Data":"816ac90dc88810eea5300b44892c3e65dbf238b9bb6c8d6d66fd13915c349f1d"} Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.161705 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f84mf" event={"ID":"519d3214-4add-4ce6-a572-05cd07ea0107","Type":"ContainerDied","Data":"90da877ce8841cb8e7acac1905a5a340de42ffac0024a2a6491cc759795aae3a"} Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.161722 4644 scope.go:117] "RemoveContainer" containerID="816ac90dc88810eea5300b44892c3e65dbf238b9bb6c8d6d66fd13915c349f1d" Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.161832 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f84mf" Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.196077 4644 scope.go:117] "RemoveContainer" containerID="cad8b9941bda028baf72e13a3a3d727f3e0df995e06b6f5aab1b3a243f9f559b" Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.257083 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f84mf"] Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.261611 4644 scope.go:117] "RemoveContainer" containerID="aa5c9865662aa17c888cf68b8cf3063619cefc6e4c8707b9399daa4d422ca2fe" Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.275299 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-f84mf"] Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.299320 4644 scope.go:117] "RemoveContainer" containerID="816ac90dc88810eea5300b44892c3e65dbf238b9bb6c8d6d66fd13915c349f1d" Dec 13 07:31:15 crc kubenswrapper[4644]: E1213 07:31:15.300499 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"816ac90dc88810eea5300b44892c3e65dbf238b9bb6c8d6d66fd13915c349f1d\": container with ID starting with 816ac90dc88810eea5300b44892c3e65dbf238b9bb6c8d6d66fd13915c349f1d not found: ID does not exist" containerID="816ac90dc88810eea5300b44892c3e65dbf238b9bb6c8d6d66fd13915c349f1d" Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.300549 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"816ac90dc88810eea5300b44892c3e65dbf238b9bb6c8d6d66fd13915c349f1d"} err="failed to get container status \"816ac90dc88810eea5300b44892c3e65dbf238b9bb6c8d6d66fd13915c349f1d\": rpc error: code = NotFound desc = could not find container \"816ac90dc88810eea5300b44892c3e65dbf238b9bb6c8d6d66fd13915c349f1d\": container with ID starting with 816ac90dc88810eea5300b44892c3e65dbf238b9bb6c8d6d66fd13915c349f1d not found: ID does not exist" Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.300574 4644 scope.go:117] "RemoveContainer" containerID="cad8b9941bda028baf72e13a3a3d727f3e0df995e06b6f5aab1b3a243f9f559b" Dec 13 07:31:15 crc kubenswrapper[4644]: E1213 07:31:15.301036 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cad8b9941bda028baf72e13a3a3d727f3e0df995e06b6f5aab1b3a243f9f559b\": container with ID starting with cad8b9941bda028baf72e13a3a3d727f3e0df995e06b6f5aab1b3a243f9f559b not found: ID does not exist" containerID="cad8b9941bda028baf72e13a3a3d727f3e0df995e06b6f5aab1b3a243f9f559b" Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.301075 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cad8b9941bda028baf72e13a3a3d727f3e0df995e06b6f5aab1b3a243f9f559b"} err="failed to get container status \"cad8b9941bda028baf72e13a3a3d727f3e0df995e06b6f5aab1b3a243f9f559b\": rpc error: code = NotFound desc = could not find container \"cad8b9941bda028baf72e13a3a3d727f3e0df995e06b6f5aab1b3a243f9f559b\": container with ID starting with cad8b9941bda028baf72e13a3a3d727f3e0df995e06b6f5aab1b3a243f9f559b not found: ID does not exist" Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.301100 4644 scope.go:117] "RemoveContainer" containerID="aa5c9865662aa17c888cf68b8cf3063619cefc6e4c8707b9399daa4d422ca2fe" Dec 13 07:31:15 crc kubenswrapper[4644]: E1213 07:31:15.301357 4644 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"aa5c9865662aa17c888cf68b8cf3063619cefc6e4c8707b9399daa4d422ca2fe\": container with ID starting with aa5c9865662aa17c888cf68b8cf3063619cefc6e4c8707b9399daa4d422ca2fe not found: ID does not exist" containerID="aa5c9865662aa17c888cf68b8cf3063619cefc6e4c8707b9399daa4d422ca2fe" Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.301382 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa5c9865662aa17c888cf68b8cf3063619cefc6e4c8707b9399daa4d422ca2fe"} err="failed to get container status \"aa5c9865662aa17c888cf68b8cf3063619cefc6e4c8707b9399daa4d422ca2fe\": rpc error: code = NotFound desc = could not find container \"aa5c9865662aa17c888cf68b8cf3063619cefc6e4c8707b9399daa4d422ca2fe\": container with ID starting with aa5c9865662aa17c888cf68b8cf3063619cefc6e4c8707b9399daa4d422ca2fe not found: ID does not exist" Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.502331 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-klxpd" Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.503041 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-klxpd" Dec 13 07:31:15 crc kubenswrapper[4644]: I1213 07:31:15.539695 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-klxpd" Dec 13 07:31:16 crc kubenswrapper[4644]: I1213 07:31:16.206701 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-klxpd" Dec 13 07:31:16 crc kubenswrapper[4644]: I1213 07:31:16.400259 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="519d3214-4add-4ce6-a572-05cd07ea0107" path="/var/lib/kubelet/pods/519d3214-4add-4ce6-a572-05cd07ea0107/volumes" Dec 13 07:31:17 crc kubenswrapper[4644]: I1213 07:31:17.775789 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-klxpd"] Dec 13 07:31:19 crc kubenswrapper[4644]: I1213 07:31:19.195890 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-klxpd" podUID="207e9f99-4eda-431a-831b-96bb485d3b0e" containerName="registry-server" containerID="cri-o://5cc7fe0c46ab9c833a4f6385a83758484957b5e6cc94426edb591b5de8a98867" gracePeriod=2 Dec 13 07:31:19 crc kubenswrapper[4644]: I1213 07:31:19.608508 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-klxpd" Dec 13 07:31:19 crc kubenswrapper[4644]: I1213 07:31:19.768724 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hfq7z\" (UniqueName: \"kubernetes.io/projected/207e9f99-4eda-431a-831b-96bb485d3b0e-kube-api-access-hfq7z\") pod \"207e9f99-4eda-431a-831b-96bb485d3b0e\" (UID: \"207e9f99-4eda-431a-831b-96bb485d3b0e\") " Dec 13 07:31:19 crc kubenswrapper[4644]: I1213 07:31:19.769061 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/207e9f99-4eda-431a-831b-96bb485d3b0e-catalog-content\") pod \"207e9f99-4eda-431a-831b-96bb485d3b0e\" (UID: \"207e9f99-4eda-431a-831b-96bb485d3b0e\") " Dec 13 07:31:19 crc kubenswrapper[4644]: I1213 07:31:19.769892 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/207e9f99-4eda-431a-831b-96bb485d3b0e-utilities\") pod \"207e9f99-4eda-431a-831b-96bb485d3b0e\" (UID: \"207e9f99-4eda-431a-831b-96bb485d3b0e\") " Dec 13 07:31:19 crc kubenswrapper[4644]: I1213 07:31:19.770510 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/207e9f99-4eda-431a-831b-96bb485d3b0e-utilities" (OuterVolumeSpecName: "utilities") pod "207e9f99-4eda-431a-831b-96bb485d3b0e" (UID: "207e9f99-4eda-431a-831b-96bb485d3b0e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:31:19 crc kubenswrapper[4644]: I1213 07:31:19.771758 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/207e9f99-4eda-431a-831b-96bb485d3b0e-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:31:19 crc kubenswrapper[4644]: I1213 07:31:19.774583 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/207e9f99-4eda-431a-831b-96bb485d3b0e-kube-api-access-hfq7z" (OuterVolumeSpecName: "kube-api-access-hfq7z") pod "207e9f99-4eda-431a-831b-96bb485d3b0e" (UID: "207e9f99-4eda-431a-831b-96bb485d3b0e"). InnerVolumeSpecName "kube-api-access-hfq7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:31:19 crc kubenswrapper[4644]: I1213 07:31:19.783251 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/207e9f99-4eda-431a-831b-96bb485d3b0e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "207e9f99-4eda-431a-831b-96bb485d3b0e" (UID: "207e9f99-4eda-431a-831b-96bb485d3b0e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:31:19 crc kubenswrapper[4644]: I1213 07:31:19.874077 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hfq7z\" (UniqueName: \"kubernetes.io/projected/207e9f99-4eda-431a-831b-96bb485d3b0e-kube-api-access-hfq7z\") on node \"crc\" DevicePath \"\"" Dec 13 07:31:19 crc kubenswrapper[4644]: I1213 07:31:19.874104 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/207e9f99-4eda-431a-831b-96bb485d3b0e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:31:20 crc kubenswrapper[4644]: I1213 07:31:20.209289 4644 generic.go:334] "Generic (PLEG): container finished" podID="207e9f99-4eda-431a-831b-96bb485d3b0e" containerID="5cc7fe0c46ab9c833a4f6385a83758484957b5e6cc94426edb591b5de8a98867" exitCode=0 Dec 13 07:31:20 crc kubenswrapper[4644]: I1213 07:31:20.209334 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klxpd" event={"ID":"207e9f99-4eda-431a-831b-96bb485d3b0e","Type":"ContainerDied","Data":"5cc7fe0c46ab9c833a4f6385a83758484957b5e6cc94426edb591b5de8a98867"} Dec 13 07:31:20 crc kubenswrapper[4644]: I1213 07:31:20.209349 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-klxpd" Dec 13 07:31:20 crc kubenswrapper[4644]: I1213 07:31:20.209371 4644 scope.go:117] "RemoveContainer" containerID="5cc7fe0c46ab9c833a4f6385a83758484957b5e6cc94426edb591b5de8a98867" Dec 13 07:31:20 crc kubenswrapper[4644]: I1213 07:31:20.209361 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klxpd" event={"ID":"207e9f99-4eda-431a-831b-96bb485d3b0e","Type":"ContainerDied","Data":"a0208320c15d35581f17f685a50dffce5c2224cf6b45b921abd9ba989acd835d"} Dec 13 07:31:20 crc kubenswrapper[4644]: I1213 07:31:20.224411 4644 scope.go:117] "RemoveContainer" containerID="e4e43b2058d02d55fe8503d9fb0e150f34a86a1b1bc6b4fa130d14f2377312e5" Dec 13 07:31:20 crc kubenswrapper[4644]: I1213 07:31:20.236707 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-klxpd"] Dec 13 07:31:20 crc kubenswrapper[4644]: I1213 07:31:20.243270 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-klxpd"] Dec 13 07:31:20 crc kubenswrapper[4644]: I1213 07:31:20.256174 4644 scope.go:117] "RemoveContainer" containerID="c27e2321eec11a8886bb6ca6dd2891e0df35d13e8534b07d6f61dc37549f0ffb" Dec 13 07:31:20 crc kubenswrapper[4644]: I1213 07:31:20.273596 4644 scope.go:117] "RemoveContainer" containerID="5cc7fe0c46ab9c833a4f6385a83758484957b5e6cc94426edb591b5de8a98867" Dec 13 07:31:20 crc kubenswrapper[4644]: E1213 07:31:20.273957 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cc7fe0c46ab9c833a4f6385a83758484957b5e6cc94426edb591b5de8a98867\": container with ID starting with 5cc7fe0c46ab9c833a4f6385a83758484957b5e6cc94426edb591b5de8a98867 not found: ID does not exist" containerID="5cc7fe0c46ab9c833a4f6385a83758484957b5e6cc94426edb591b5de8a98867" Dec 13 07:31:20 crc kubenswrapper[4644]: I1213 07:31:20.273999 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cc7fe0c46ab9c833a4f6385a83758484957b5e6cc94426edb591b5de8a98867"} err="failed to get container status 
\"5cc7fe0c46ab9c833a4f6385a83758484957b5e6cc94426edb591b5de8a98867\": rpc error: code = NotFound desc = could not find container \"5cc7fe0c46ab9c833a4f6385a83758484957b5e6cc94426edb591b5de8a98867\": container with ID starting with 5cc7fe0c46ab9c833a4f6385a83758484957b5e6cc94426edb591b5de8a98867 not found: ID does not exist" Dec 13 07:31:20 crc kubenswrapper[4644]: I1213 07:31:20.274026 4644 scope.go:117] "RemoveContainer" containerID="e4e43b2058d02d55fe8503d9fb0e150f34a86a1b1bc6b4fa130d14f2377312e5" Dec 13 07:31:20 crc kubenswrapper[4644]: E1213 07:31:20.274367 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4e43b2058d02d55fe8503d9fb0e150f34a86a1b1bc6b4fa130d14f2377312e5\": container with ID starting with e4e43b2058d02d55fe8503d9fb0e150f34a86a1b1bc6b4fa130d14f2377312e5 not found: ID does not exist" containerID="e4e43b2058d02d55fe8503d9fb0e150f34a86a1b1bc6b4fa130d14f2377312e5" Dec 13 07:31:20 crc kubenswrapper[4644]: I1213 07:31:20.274388 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4e43b2058d02d55fe8503d9fb0e150f34a86a1b1bc6b4fa130d14f2377312e5"} err="failed to get container status \"e4e43b2058d02d55fe8503d9fb0e150f34a86a1b1bc6b4fa130d14f2377312e5\": rpc error: code = NotFound desc = could not find container \"e4e43b2058d02d55fe8503d9fb0e150f34a86a1b1bc6b4fa130d14f2377312e5\": container with ID starting with e4e43b2058d02d55fe8503d9fb0e150f34a86a1b1bc6b4fa130d14f2377312e5 not found: ID does not exist" Dec 13 07:31:20 crc kubenswrapper[4644]: I1213 07:31:20.274400 4644 scope.go:117] "RemoveContainer" containerID="c27e2321eec11a8886bb6ca6dd2891e0df35d13e8534b07d6f61dc37549f0ffb" Dec 13 07:31:20 crc kubenswrapper[4644]: E1213 07:31:20.274737 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c27e2321eec11a8886bb6ca6dd2891e0df35d13e8534b07d6f61dc37549f0ffb\": container with ID starting with c27e2321eec11a8886bb6ca6dd2891e0df35d13e8534b07d6f61dc37549f0ffb not found: ID does not exist" containerID="c27e2321eec11a8886bb6ca6dd2891e0df35d13e8534b07d6f61dc37549f0ffb" Dec 13 07:31:20 crc kubenswrapper[4644]: I1213 07:31:20.274771 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c27e2321eec11a8886bb6ca6dd2891e0df35d13e8534b07d6f61dc37549f0ffb"} err="failed to get container status \"c27e2321eec11a8886bb6ca6dd2891e0df35d13e8534b07d6f61dc37549f0ffb\": rpc error: code = NotFound desc = could not find container \"c27e2321eec11a8886bb6ca6dd2891e0df35d13e8534b07d6f61dc37549f0ffb\": container with ID starting with c27e2321eec11a8886bb6ca6dd2891e0df35d13e8534b07d6f61dc37549f0ffb not found: ID does not exist" Dec 13 07:31:20 crc kubenswrapper[4644]: I1213 07:31:20.407534 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="207e9f99-4eda-431a-831b-96bb485d3b0e" path="/var/lib/kubelet/pods/207e9f99-4eda-431a-831b-96bb485d3b0e/volumes" Dec 13 07:31:39 crc kubenswrapper[4644]: I1213 07:31:39.753585 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:31:39 crc kubenswrapper[4644]: I1213 07:31:39.753980 4644 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:31:57 crc kubenswrapper[4644]: I1213 07:31:57.058674 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-859586489-hlw4r"] Dec 13 07:31:57 crc kubenswrapper[4644]: E1213 07:31:57.059735 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="519d3214-4add-4ce6-a572-05cd07ea0107" containerName="extract-content" Dec 13 07:31:57 crc kubenswrapper[4644]: I1213 07:31:57.059750 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="519d3214-4add-4ce6-a572-05cd07ea0107" containerName="extract-content" Dec 13 07:31:57 crc kubenswrapper[4644]: E1213 07:31:57.059762 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="519d3214-4add-4ce6-a572-05cd07ea0107" containerName="registry-server" Dec 13 07:31:57 crc kubenswrapper[4644]: I1213 07:31:57.059768 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="519d3214-4add-4ce6-a572-05cd07ea0107" containerName="registry-server" Dec 13 07:31:57 crc kubenswrapper[4644]: E1213 07:31:57.059782 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="207e9f99-4eda-431a-831b-96bb485d3b0e" containerName="extract-utilities" Dec 13 07:31:57 crc kubenswrapper[4644]: I1213 07:31:57.059788 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="207e9f99-4eda-431a-831b-96bb485d3b0e" containerName="extract-utilities" Dec 13 07:31:57 crc kubenswrapper[4644]: E1213 07:31:57.059809 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="207e9f99-4eda-431a-831b-96bb485d3b0e" containerName="registry-server" Dec 13 07:31:57 crc kubenswrapper[4644]: I1213 07:31:57.059815 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="207e9f99-4eda-431a-831b-96bb485d3b0e" containerName="registry-server" Dec 13 07:31:57 crc kubenswrapper[4644]: E1213 07:31:57.059828 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="519d3214-4add-4ce6-a572-05cd07ea0107" containerName="extract-utilities" Dec 13 07:31:57 crc kubenswrapper[4644]: I1213 07:31:57.059833 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="519d3214-4add-4ce6-a572-05cd07ea0107" containerName="extract-utilities" Dec 13 07:31:57 crc kubenswrapper[4644]: E1213 07:31:57.059843 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="207e9f99-4eda-431a-831b-96bb485d3b0e" containerName="extract-content" Dec 13 07:31:57 crc kubenswrapper[4644]: I1213 07:31:57.059848 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="207e9f99-4eda-431a-831b-96bb485d3b0e" containerName="extract-content" Dec 13 07:31:57 crc kubenswrapper[4644]: I1213 07:31:57.060010 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="207e9f99-4eda-431a-831b-96bb485d3b0e" containerName="registry-server" Dec 13 07:31:57 crc kubenswrapper[4644]: I1213 07:31:57.060028 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="519d3214-4add-4ce6-a572-05cd07ea0107" containerName="registry-server" Dec 13 07:31:57 crc kubenswrapper[4644]: I1213 07:31:57.060589 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-859586489-hlw4r" Dec 13 07:31:57 crc kubenswrapper[4644]: I1213 07:31:57.088596 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-859586489-hlw4r"] Dec 13 07:31:57 crc kubenswrapper[4644]: I1213 07:31:57.220429 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5xtj\" (UniqueName: \"kubernetes.io/projected/24da4990-742e-476a-aa8b-0a30e8dc0930-kube-api-access-h5xtj\") pod \"openstack-operator-controller-operator-859586489-hlw4r\" (UID: \"24da4990-742e-476a-aa8b-0a30e8dc0930\") " pod="openstack-operators/openstack-operator-controller-operator-859586489-hlw4r" Dec 13 07:31:57 crc kubenswrapper[4644]: I1213 07:31:57.321934 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5xtj\" (UniqueName: \"kubernetes.io/projected/24da4990-742e-476a-aa8b-0a30e8dc0930-kube-api-access-h5xtj\") pod \"openstack-operator-controller-operator-859586489-hlw4r\" (UID: \"24da4990-742e-476a-aa8b-0a30e8dc0930\") " pod="openstack-operators/openstack-operator-controller-operator-859586489-hlw4r" Dec 13 07:31:57 crc kubenswrapper[4644]: I1213 07:31:57.340095 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5xtj\" (UniqueName: \"kubernetes.io/projected/24da4990-742e-476a-aa8b-0a30e8dc0930-kube-api-access-h5xtj\") pod \"openstack-operator-controller-operator-859586489-hlw4r\" (UID: \"24da4990-742e-476a-aa8b-0a30e8dc0930\") " pod="openstack-operators/openstack-operator-controller-operator-859586489-hlw4r" Dec 13 07:31:57 crc kubenswrapper[4644]: I1213 07:31:57.378725 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-859586489-hlw4r" Dec 13 07:31:57 crc kubenswrapper[4644]: I1213 07:31:57.779759 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-859586489-hlw4r"] Dec 13 07:31:58 crc kubenswrapper[4644]: I1213 07:31:58.465470 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-859586489-hlw4r" event={"ID":"24da4990-742e-476a-aa8b-0a30e8dc0930","Type":"ContainerStarted","Data":"16d66bf93409d80b5a6bfed7bb24770a3e65f7fe2fee92e7e07fc630848198ef"} Dec 13 07:31:58 crc kubenswrapper[4644]: I1213 07:31:58.465848 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-859586489-hlw4r" event={"ID":"24da4990-742e-476a-aa8b-0a30e8dc0930","Type":"ContainerStarted","Data":"15ea67fbe38365a1480a4ba9c4b056f178e6c29b7a4a8f5468f3c2cec01358fe"} Dec 13 07:31:58 crc kubenswrapper[4644]: I1213 07:31:58.465872 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-859586489-hlw4r" Dec 13 07:31:58 crc kubenswrapper[4644]: I1213 07:31:58.496159 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-859586489-hlw4r" podStartSLOduration=1.496138074 podStartE2EDuration="1.496138074s" podCreationTimestamp="2025-12-13 07:31:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:31:58.486680723 +0000 UTC m=+2780.701631556" watchObservedRunningTime="2025-12-13 07:31:58.496138074 +0000 UTC m=+2780.711088897" Dec 13 07:32:07 crc kubenswrapper[4644]: I1213 07:32:07.380898 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-859586489-hlw4r" Dec 13 07:32:07 crc kubenswrapper[4644]: I1213 07:32:07.441546 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4"] Dec 13 07:32:07 crc kubenswrapper[4644]: I1213 07:32:07.441790 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4" podUID="6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2" containerName="operator" containerID="cri-o://8300d463a6e0016f6d9f254db89768683d663200470b9bb5e5fb784b997bdb7a" gracePeriod=10 Dec 13 07:32:07 crc kubenswrapper[4644]: I1213 07:32:07.816046 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4" Dec 13 07:32:07 crc kubenswrapper[4644]: I1213 07:32:07.942489 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbxf8\" (UniqueName: \"kubernetes.io/projected/6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2-kube-api-access-lbxf8\") pod \"6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2\" (UID: \"6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2\") " Dec 13 07:32:07 crc kubenswrapper[4644]: I1213 07:32:07.949776 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2-kube-api-access-lbxf8" (OuterVolumeSpecName: "kube-api-access-lbxf8") pod "6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2" (UID: "6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2"). InnerVolumeSpecName "kube-api-access-lbxf8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:32:08 crc kubenswrapper[4644]: I1213 07:32:08.044844 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbxf8\" (UniqueName: \"kubernetes.io/projected/6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2-kube-api-access-lbxf8\") on node \"crc\" DevicePath \"\"" Dec 13 07:32:08 crc kubenswrapper[4644]: I1213 07:32:08.529032 4644 generic.go:334] "Generic (PLEG): container finished" podID="6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2" containerID="8300d463a6e0016f6d9f254db89768683d663200470b9bb5e5fb784b997bdb7a" exitCode=0 Dec 13 07:32:08 crc kubenswrapper[4644]: I1213 07:32:08.529069 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4" event={"ID":"6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2","Type":"ContainerDied","Data":"8300d463a6e0016f6d9f254db89768683d663200470b9bb5e5fb784b997bdb7a"} Dec 13 07:32:08 crc kubenswrapper[4644]: I1213 07:32:08.529092 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4" event={"ID":"6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2","Type":"ContainerDied","Data":"64c35f8fb4909c8acb7b5f776aec7f76ed6e9fe0a317e0930b76ce47c6954926"} Dec 13 07:32:08 crc kubenswrapper[4644]: I1213 07:32:08.529107 4644 scope.go:117] "RemoveContainer" containerID="8300d463a6e0016f6d9f254db89768683d663200470b9bb5e5fb784b997bdb7a" Dec 13 07:32:08 crc kubenswrapper[4644]: I1213 07:32:08.529104 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4" Dec 13 07:32:08 crc kubenswrapper[4644]: I1213 07:32:08.549371 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4"] Dec 13 07:32:08 crc kubenswrapper[4644]: I1213 07:32:08.551323 4644 scope.go:117] "RemoveContainer" containerID="8300d463a6e0016f6d9f254db89768683d663200470b9bb5e5fb784b997bdb7a" Dec 13 07:32:08 crc kubenswrapper[4644]: E1213 07:32:08.551837 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8300d463a6e0016f6d9f254db89768683d663200470b9bb5e5fb784b997bdb7a\": container with ID starting with 8300d463a6e0016f6d9f254db89768683d663200470b9bb5e5fb784b997bdb7a not found: ID does not exist" containerID="8300d463a6e0016f6d9f254db89768683d663200470b9bb5e5fb784b997bdb7a" Dec 13 07:32:08 crc kubenswrapper[4644]: I1213 07:32:08.551867 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8300d463a6e0016f6d9f254db89768683d663200470b9bb5e5fb784b997bdb7a"} err="failed to get container status \"8300d463a6e0016f6d9f254db89768683d663200470b9bb5e5fb784b997bdb7a\": rpc error: code = NotFound desc = could not find container \"8300d463a6e0016f6d9f254db89768683d663200470b9bb5e5fb784b997bdb7a\": container with ID starting with 8300d463a6e0016f6d9f254db89768683d663200470b9bb5e5fb784b997bdb7a not found: ID does not exist" Dec 13 07:32:08 crc kubenswrapper[4644]: I1213 07:32:08.555145 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-57bbbf4567-2s5l4"] Dec 13 07:32:09 crc kubenswrapper[4644]: I1213 07:32:09.753894 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:32:09 crc kubenswrapper[4644]: I1213 07:32:09.754130 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:32:09 crc kubenswrapper[4644]: I1213 07:32:09.754175 4644 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 07:32:09 crc kubenswrapper[4644]: I1213 07:32:09.754913 4644 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4f4b7bfe2613e6cda772a52ce14ce5049a7998ee5bbd61facee3957853dd67cc"} pod="openshift-machine-config-operator/machine-config-daemon-45tj4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 07:32:09 crc kubenswrapper[4644]: I1213 07:32:09.754960 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" containerID="cri-o://4f4b7bfe2613e6cda772a52ce14ce5049a7998ee5bbd61facee3957853dd67cc" gracePeriod=600 Dec 13 07:32:09 crc 
kubenswrapper[4644]: E1213 07:32:09.844584 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod48240f19_087e_4597_b448_ab1a190a5027.slice/crio-4f4b7bfe2613e6cda772a52ce14ce5049a7998ee5bbd61facee3957853dd67cc.scope\": RecentStats: unable to find data in memory cache]" Dec 13 07:32:10 crc kubenswrapper[4644]: I1213 07:32:10.397553 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2" path="/var/lib/kubelet/pods/6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2/volumes" Dec 13 07:32:10 crc kubenswrapper[4644]: I1213 07:32:10.545230 4644 generic.go:334] "Generic (PLEG): container finished" podID="48240f19-087e-4597-b448-ab1a190a5027" containerID="4f4b7bfe2613e6cda772a52ce14ce5049a7998ee5bbd61facee3957853dd67cc" exitCode=0 Dec 13 07:32:10 crc kubenswrapper[4644]: I1213 07:32:10.545266 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerDied","Data":"4f4b7bfe2613e6cda772a52ce14ce5049a7998ee5bbd61facee3957853dd67cc"} Dec 13 07:32:10 crc kubenswrapper[4644]: I1213 07:32:10.545290 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerStarted","Data":"81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"} Dec 13 07:32:10 crc kubenswrapper[4644]: I1213 07:32:10.545305 4644 scope.go:117] "RemoveContainer" containerID="385ff3d76f1acba7cb8b7003d07533173d58100e4fa914b27860acb7a0e7ffec" Dec 13 07:32:39 crc kubenswrapper[4644]: I1213 07:32:39.081978 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq"] Dec 13 07:32:39 crc kubenswrapper[4644]: E1213 07:32:39.082727 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2" containerName="operator" Dec 13 07:32:39 crc kubenswrapper[4644]: I1213 07:32:39.082741 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2" containerName="operator" Dec 13 07:32:39 crc kubenswrapper[4644]: I1213 07:32:39.082918 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a7ad2fa-50e8-4a33-8b81-96d9fdc315b2" containerName="operator" Dec 13 07:32:39 crc kubenswrapper[4644]: I1213 07:32:39.083477 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" Dec 13 07:32:39 crc kubenswrapper[4644]: I1213 07:32:39.093274 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq"] Dec 13 07:32:39 crc kubenswrapper[4644]: I1213 07:32:39.154873 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvtgd\" (UniqueName: \"kubernetes.io/projected/511296bd-fff8-49c1-bbfd-b702905f6e83-kube-api-access-kvtgd\") pod \"test-operator-controller-manager-9fc9c756c-8sjtq\" (UID: \"511296bd-fff8-49c1-bbfd-b702905f6e83\") " pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" Dec 13 07:32:39 crc kubenswrapper[4644]: I1213 07:32:39.257245 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvtgd\" (UniqueName: \"kubernetes.io/projected/511296bd-fff8-49c1-bbfd-b702905f6e83-kube-api-access-kvtgd\") pod \"test-operator-controller-manager-9fc9c756c-8sjtq\" (UID: \"511296bd-fff8-49c1-bbfd-b702905f6e83\") " pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" Dec 13 07:32:39 crc kubenswrapper[4644]: I1213 07:32:39.272435 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvtgd\" (UniqueName: \"kubernetes.io/projected/511296bd-fff8-49c1-bbfd-b702905f6e83-kube-api-access-kvtgd\") pod \"test-operator-controller-manager-9fc9c756c-8sjtq\" (UID: \"511296bd-fff8-49c1-bbfd-b702905f6e83\") " pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" Dec 13 07:32:39 crc kubenswrapper[4644]: I1213 07:32:39.398999 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" Dec 13 07:32:39 crc kubenswrapper[4644]: I1213 07:32:39.777958 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq"] Dec 13 07:32:39 crc kubenswrapper[4644]: I1213 07:32:39.782876 4644 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 07:32:39 crc kubenswrapper[4644]: I1213 07:32:39.817101 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" event={"ID":"511296bd-fff8-49c1-bbfd-b702905f6e83","Type":"ContainerStarted","Data":"b6d43b3cf45d5192f0e768e00de24c9f172bb5d3bfdea36b1036078f10626166"} Dec 13 07:34:39 crc kubenswrapper[4644]: I1213 07:34:39.754113 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:34:39 crc kubenswrapper[4644]: I1213 07:34:39.755090 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:34:39 crc kubenswrapper[4644]: E1213 07:34:39.787450 4644 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source 
docker://38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca: pinging container registry 38.129.56.153:5001: Get \"http://38.129.56.153:5001/v2/\": dial tcp 38.129.56.153:5001: i/o timeout" image="38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca" Dec 13 07:34:39 crc kubenswrapper[4644]: E1213 07:34:39.787504 4644 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca: pinging container registry 38.129.56.153:5001: Get \"http://38.129.56.153:5001/v2/\": dial tcp 38.129.56.153:5001: i/o timeout" image="38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca" Dec 13 07:34:39 crc kubenswrapper[4644]: E1213 07:34:39.787621 4644 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kvtgd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-9fc9c756c-8sjtq_openstack-operators(511296bd-fff8-49c1-bbfd-b702905f6e83): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca: pinging container registry 
38.129.56.153:5001: Get \"http://38.129.56.153:5001/v2/\": dial tcp 38.129.56.153:5001: i/o timeout" logger="UnhandledError" Dec 13 07:34:39 crc kubenswrapper[4644]: E1213 07:34:39.789515 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca: pinging container registry 38.129.56.153:5001: Get \\\"http://38.129.56.153:5001/v2/\\\": dial tcp 38.129.56.153:5001: i/o timeout\"" pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" podUID="511296bd-fff8-49c1-bbfd-b702905f6e83" Dec 13 07:34:40 crc kubenswrapper[4644]: E1213 07:34:40.646236 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca\\\"\"" pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" podUID="511296bd-fff8-49c1-bbfd-b702905f6e83" Dec 13 07:35:09 crc kubenswrapper[4644]: I1213 07:35:09.753716 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:35:09 crc kubenswrapper[4644]: I1213 07:35:09.755009 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:35:35 crc kubenswrapper[4644]: I1213 07:35:35.940475 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zgl5r"] Dec 13 07:35:35 crc kubenswrapper[4644]: I1213 07:35:35.942472 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zgl5r" Dec 13 07:35:35 crc kubenswrapper[4644]: I1213 07:35:35.954491 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zgl5r"] Dec 13 07:35:36 crc kubenswrapper[4644]: I1213 07:35:36.050490 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mndx5\" (UniqueName: \"kubernetes.io/projected/fb946573-a59d-400b-a559-069edd1ffafa-kube-api-access-mndx5\") pod \"certified-operators-zgl5r\" (UID: \"fb946573-a59d-400b-a559-069edd1ffafa\") " pod="openshift-marketplace/certified-operators-zgl5r" Dec 13 07:35:36 crc kubenswrapper[4644]: I1213 07:35:36.051096 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb946573-a59d-400b-a559-069edd1ffafa-catalog-content\") pod \"certified-operators-zgl5r\" (UID: \"fb946573-a59d-400b-a559-069edd1ffafa\") " pod="openshift-marketplace/certified-operators-zgl5r" Dec 13 07:35:36 crc kubenswrapper[4644]: I1213 07:35:36.051140 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb946573-a59d-400b-a559-069edd1ffafa-utilities\") pod \"certified-operators-zgl5r\" (UID: \"fb946573-a59d-400b-a559-069edd1ffafa\") " pod="openshift-marketplace/certified-operators-zgl5r" Dec 13 07:35:36 crc kubenswrapper[4644]: I1213 07:35:36.153765 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb946573-a59d-400b-a559-069edd1ffafa-catalog-content\") pod \"certified-operators-zgl5r\" (UID: \"fb946573-a59d-400b-a559-069edd1ffafa\") " pod="openshift-marketplace/certified-operators-zgl5r" Dec 13 07:35:36 crc kubenswrapper[4644]: I1213 07:35:36.153814 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb946573-a59d-400b-a559-069edd1ffafa-utilities\") pod \"certified-operators-zgl5r\" (UID: \"fb946573-a59d-400b-a559-069edd1ffafa\") " pod="openshift-marketplace/certified-operators-zgl5r" Dec 13 07:35:36 crc kubenswrapper[4644]: I1213 07:35:36.154111 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mndx5\" (UniqueName: \"kubernetes.io/projected/fb946573-a59d-400b-a559-069edd1ffafa-kube-api-access-mndx5\") pod \"certified-operators-zgl5r\" (UID: \"fb946573-a59d-400b-a559-069edd1ffafa\") " pod="openshift-marketplace/certified-operators-zgl5r" Dec 13 07:35:36 crc kubenswrapper[4644]: I1213 07:35:36.154223 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb946573-a59d-400b-a559-069edd1ffafa-catalog-content\") pod \"certified-operators-zgl5r\" (UID: \"fb946573-a59d-400b-a559-069edd1ffafa\") " pod="openshift-marketplace/certified-operators-zgl5r" Dec 13 07:35:36 crc kubenswrapper[4644]: I1213 07:35:36.154292 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb946573-a59d-400b-a559-069edd1ffafa-utilities\") pod \"certified-operators-zgl5r\" (UID: \"fb946573-a59d-400b-a559-069edd1ffafa\") " pod="openshift-marketplace/certified-operators-zgl5r" Dec 13 07:35:36 crc kubenswrapper[4644]: I1213 07:35:36.169938 4644 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mndx5\" (UniqueName: \"kubernetes.io/projected/fb946573-a59d-400b-a559-069edd1ffafa-kube-api-access-mndx5\") pod \"certified-operators-zgl5r\" (UID: \"fb946573-a59d-400b-a559-069edd1ffafa\") " pod="openshift-marketplace/certified-operators-zgl5r" Dec 13 07:35:36 crc kubenswrapper[4644]: I1213 07:35:36.262200 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zgl5r" Dec 13 07:35:36 crc kubenswrapper[4644]: I1213 07:35:36.712794 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zgl5r"] Dec 13 07:35:37 crc kubenswrapper[4644]: I1213 07:35:37.030852 4644 generic.go:334] "Generic (PLEG): container finished" podID="fb946573-a59d-400b-a559-069edd1ffafa" containerID="02f42e6ce4897d7e85b2315daf3bdca8170efeb5430c5ce9c30ec2ece99f5e63" exitCode=0 Dec 13 07:35:37 crc kubenswrapper[4644]: I1213 07:35:37.030894 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgl5r" event={"ID":"fb946573-a59d-400b-a559-069edd1ffafa","Type":"ContainerDied","Data":"02f42e6ce4897d7e85b2315daf3bdca8170efeb5430c5ce9c30ec2ece99f5e63"} Dec 13 07:35:37 crc kubenswrapper[4644]: I1213 07:35:37.030931 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgl5r" event={"ID":"fb946573-a59d-400b-a559-069edd1ffafa","Type":"ContainerStarted","Data":"61101894ed9f0b02d3168694e1d5809bfdcb23249fa4b466c7769ef22bb0e194"} Dec 13 07:35:37 crc kubenswrapper[4644]: I1213 07:35:37.738317 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6fdth"] Dec 13 07:35:37 crc kubenswrapper[4644]: I1213 07:35:37.739856 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6fdth" Dec 13 07:35:37 crc kubenswrapper[4644]: I1213 07:35:37.746173 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6fdth"] Dec 13 07:35:37 crc kubenswrapper[4644]: I1213 07:35:37.879160 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ec3a7e1-b616-4d72-8400-905ef791e7fc-catalog-content\") pod \"community-operators-6fdth\" (UID: \"2ec3a7e1-b616-4d72-8400-905ef791e7fc\") " pod="openshift-marketplace/community-operators-6fdth" Dec 13 07:35:37 crc kubenswrapper[4644]: I1213 07:35:37.879224 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28sfh\" (UniqueName: \"kubernetes.io/projected/2ec3a7e1-b616-4d72-8400-905ef791e7fc-kube-api-access-28sfh\") pod \"community-operators-6fdth\" (UID: \"2ec3a7e1-b616-4d72-8400-905ef791e7fc\") " pod="openshift-marketplace/community-operators-6fdth" Dec 13 07:35:37 crc kubenswrapper[4644]: I1213 07:35:37.879741 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ec3a7e1-b616-4d72-8400-905ef791e7fc-utilities\") pod \"community-operators-6fdth\" (UID: \"2ec3a7e1-b616-4d72-8400-905ef791e7fc\") " pod="openshift-marketplace/community-operators-6fdth" Dec 13 07:35:37 crc kubenswrapper[4644]: I1213 07:35:37.982459 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ec3a7e1-b616-4d72-8400-905ef791e7fc-catalog-content\") pod \"community-operators-6fdth\" (UID: \"2ec3a7e1-b616-4d72-8400-905ef791e7fc\") " pod="openshift-marketplace/community-operators-6fdth" Dec 13 07:35:37 crc kubenswrapper[4644]: I1213 07:35:37.982712 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28sfh\" (UniqueName: \"kubernetes.io/projected/2ec3a7e1-b616-4d72-8400-905ef791e7fc-kube-api-access-28sfh\") pod \"community-operators-6fdth\" (UID: \"2ec3a7e1-b616-4d72-8400-905ef791e7fc\") " pod="openshift-marketplace/community-operators-6fdth" Dec 13 07:35:37 crc kubenswrapper[4644]: I1213 07:35:37.982832 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ec3a7e1-b616-4d72-8400-905ef791e7fc-utilities\") pod \"community-operators-6fdth\" (UID: \"2ec3a7e1-b616-4d72-8400-905ef791e7fc\") " pod="openshift-marketplace/community-operators-6fdth" Dec 13 07:35:37 crc kubenswrapper[4644]: I1213 07:35:37.982887 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ec3a7e1-b616-4d72-8400-905ef791e7fc-catalog-content\") pod \"community-operators-6fdth\" (UID: \"2ec3a7e1-b616-4d72-8400-905ef791e7fc\") " pod="openshift-marketplace/community-operators-6fdth" Dec 13 07:35:37 crc kubenswrapper[4644]: I1213 07:35:37.983187 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ec3a7e1-b616-4d72-8400-905ef791e7fc-utilities\") pod \"community-operators-6fdth\" (UID: \"2ec3a7e1-b616-4d72-8400-905ef791e7fc\") " pod="openshift-marketplace/community-operators-6fdth" Dec 13 07:35:38 crc kubenswrapper[4644]: I1213 07:35:38.002060 4644 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-28sfh\" (UniqueName: \"kubernetes.io/projected/2ec3a7e1-b616-4d72-8400-905ef791e7fc-kube-api-access-28sfh\") pod \"community-operators-6fdth\" (UID: \"2ec3a7e1-b616-4d72-8400-905ef791e7fc\") " pod="openshift-marketplace/community-operators-6fdth" Dec 13 07:35:38 crc kubenswrapper[4644]: I1213 07:35:38.064795 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6fdth" Dec 13 07:35:38 crc kubenswrapper[4644]: I1213 07:35:38.513261 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6fdth"] Dec 13 07:35:38 crc kubenswrapper[4644]: W1213 07:35:38.514829 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2ec3a7e1_b616_4d72_8400_905ef791e7fc.slice/crio-67919810adb3301faed7879a9960eb03167605f78cdd1310eab6d58f5b230402 WatchSource:0}: Error finding container 67919810adb3301faed7879a9960eb03167605f78cdd1310eab6d58f5b230402: Status 404 returned error can't find the container with id 67919810adb3301faed7879a9960eb03167605f78cdd1310eab6d58f5b230402 Dec 13 07:35:39 crc kubenswrapper[4644]: I1213 07:35:39.046269 4644 generic.go:334] "Generic (PLEG): container finished" podID="2ec3a7e1-b616-4d72-8400-905ef791e7fc" containerID="2f8727da4e06e514ce50908681ef851f4a0d973f29572f16aba068e348f0e6cc" exitCode=0 Dec 13 07:35:39 crc kubenswrapper[4644]: I1213 07:35:39.046374 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6fdth" event={"ID":"2ec3a7e1-b616-4d72-8400-905ef791e7fc","Type":"ContainerDied","Data":"2f8727da4e06e514ce50908681ef851f4a0d973f29572f16aba068e348f0e6cc"} Dec 13 07:35:39 crc kubenswrapper[4644]: I1213 07:35:39.046610 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6fdth" event={"ID":"2ec3a7e1-b616-4d72-8400-905ef791e7fc","Type":"ContainerStarted","Data":"67919810adb3301faed7879a9960eb03167605f78cdd1310eab6d58f5b230402"} Dec 13 07:35:39 crc kubenswrapper[4644]: I1213 07:35:39.048821 4644 generic.go:334] "Generic (PLEG): container finished" podID="fb946573-a59d-400b-a559-069edd1ffafa" containerID="a24ed257af60a0097ce9f936e6c6d1db4a439f9cbec35210c9eb1ade81d3810c" exitCode=0 Dec 13 07:35:39 crc kubenswrapper[4644]: I1213 07:35:39.048858 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgl5r" event={"ID":"fb946573-a59d-400b-a559-069edd1ffafa","Type":"ContainerDied","Data":"a24ed257af60a0097ce9f936e6c6d1db4a439f9cbec35210c9eb1ade81d3810c"} Dec 13 07:35:39 crc kubenswrapper[4644]: I1213 07:35:39.754195 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:35:39 crc kubenswrapper[4644]: I1213 07:35:39.754475 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:35:39 crc kubenswrapper[4644]: I1213 07:35:39.754522 4644 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" Dec 13 07:35:39 crc kubenswrapper[4644]: I1213 07:35:39.755244 4644 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"} pod="openshift-machine-config-operator/machine-config-daemon-45tj4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 07:35:39 crc kubenswrapper[4644]: I1213 07:35:39.755284 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" containerID="cri-o://81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428" gracePeriod=600 Dec 13 07:35:39 crc kubenswrapper[4644]: E1213 07:35:39.882465 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:35:40 crc kubenswrapper[4644]: I1213 07:35:40.059459 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgl5r" event={"ID":"fb946573-a59d-400b-a559-069edd1ffafa","Type":"ContainerStarted","Data":"1f86d8a4583181f90329153266400c6781ef1557d37696826884131a1eb68f67"} Dec 13 07:35:40 crc kubenswrapper[4644]: I1213 07:35:40.067431 4644 generic.go:334] "Generic (PLEG): container finished" podID="48240f19-087e-4597-b448-ab1a190a5027" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428" exitCode=0 Dec 13 07:35:40 crc kubenswrapper[4644]: I1213 07:35:40.067476 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerDied","Data":"81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"} Dec 13 07:35:40 crc kubenswrapper[4644]: I1213 07:35:40.067497 4644 scope.go:117] "RemoveContainer" containerID="4f4b7bfe2613e6cda772a52ce14ce5049a7998ee5bbd61facee3957853dd67cc" Dec 13 07:35:40 crc kubenswrapper[4644]: I1213 07:35:40.067841 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428" Dec 13 07:35:40 crc kubenswrapper[4644]: E1213 07:35:40.068029 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:35:40 crc kubenswrapper[4644]: I1213 07:35:40.091181 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zgl5r" podStartSLOduration=2.412608391 podStartE2EDuration="5.091146514s" podCreationTimestamp="2025-12-13 07:35:35 +0000 UTC" firstStartedPulling="2025-12-13 07:35:37.032246536 
+0000 UTC m=+2999.247197369" lastFinishedPulling="2025-12-13 07:35:39.710784659 +0000 UTC m=+3001.925735492" observedRunningTime="2025-12-13 07:35:40.079660989 +0000 UTC m=+3002.294611822" watchObservedRunningTime="2025-12-13 07:35:40.091146514 +0000 UTC m=+3002.306097347" Dec 13 07:35:43 crc kubenswrapper[4644]: I1213 07:35:43.093050 4644 generic.go:334] "Generic (PLEG): container finished" podID="2ec3a7e1-b616-4d72-8400-905ef791e7fc" containerID="9221d121aa99dbf68b52b40367d88ffd5f5f4dc7f3e9f770f8180274abbf94d5" exitCode=0 Dec 13 07:35:43 crc kubenswrapper[4644]: I1213 07:35:43.093129 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6fdth" event={"ID":"2ec3a7e1-b616-4d72-8400-905ef791e7fc","Type":"ContainerDied","Data":"9221d121aa99dbf68b52b40367d88ffd5f5f4dc7f3e9f770f8180274abbf94d5"} Dec 13 07:35:44 crc kubenswrapper[4644]: I1213 07:35:44.102803 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6fdth" event={"ID":"2ec3a7e1-b616-4d72-8400-905ef791e7fc","Type":"ContainerStarted","Data":"d8172d6a5e67987e0fd28de60d15ac5825a55fb481f36ccd5bf1c00637f8bfaa"} Dec 13 07:35:44 crc kubenswrapper[4644]: I1213 07:35:44.121563 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6fdth" podStartSLOduration=2.540641285 podStartE2EDuration="7.121547327s" podCreationTimestamp="2025-12-13 07:35:37 +0000 UTC" firstStartedPulling="2025-12-13 07:35:39.047984508 +0000 UTC m=+3001.262935341" lastFinishedPulling="2025-12-13 07:35:43.628890549 +0000 UTC m=+3005.843841383" observedRunningTime="2025-12-13 07:35:44.117584195 +0000 UTC m=+3006.332535029" watchObservedRunningTime="2025-12-13 07:35:44.121547327 +0000 UTC m=+3006.336498161" Dec 13 07:35:46 crc kubenswrapper[4644]: I1213 07:35:46.263267 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zgl5r" Dec 13 07:35:46 crc kubenswrapper[4644]: I1213 07:35:46.263958 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zgl5r" Dec 13 07:35:46 crc kubenswrapper[4644]: I1213 07:35:46.300251 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zgl5r" Dec 13 07:35:47 crc kubenswrapper[4644]: I1213 07:35:47.161043 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zgl5r" Dec 13 07:35:47 crc kubenswrapper[4644]: I1213 07:35:47.203422 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zgl5r"] Dec 13 07:35:48 crc kubenswrapper[4644]: I1213 07:35:48.065121 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6fdth" Dec 13 07:35:48 crc kubenswrapper[4644]: I1213 07:35:48.065458 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6fdth" Dec 13 07:35:48 crc kubenswrapper[4644]: I1213 07:35:48.097931 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6fdth" Dec 13 07:35:48 crc kubenswrapper[4644]: I1213 07:35:48.161817 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6fdth" Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 
07:35:49.139601 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zgl5r" podUID="fb946573-a59d-400b-a559-069edd1ffafa" containerName="registry-server" containerID="cri-o://1f86d8a4583181f90329153266400c6781ef1557d37696826884131a1eb68f67" gracePeriod=2 Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.356716 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6fdth"] Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.532595 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zgl5r" Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.541575 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wmj6h"] Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.541762 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wmj6h" podUID="e17683d9-a928-4409-ad53-736c7c243d29" containerName="registry-server" containerID="cri-o://28d78b031efc82ab6667110ff0ef02e0f8b978428b686c8c55ec69b36c0bf506" gracePeriod=2 Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.719072 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb946573-a59d-400b-a559-069edd1ffafa-utilities\") pod \"fb946573-a59d-400b-a559-069edd1ffafa\" (UID: \"fb946573-a59d-400b-a559-069edd1ffafa\") " Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.719238 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb946573-a59d-400b-a559-069edd1ffafa-catalog-content\") pod \"fb946573-a59d-400b-a559-069edd1ffafa\" (UID: \"fb946573-a59d-400b-a559-069edd1ffafa\") " Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.719435 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mndx5\" (UniqueName: \"kubernetes.io/projected/fb946573-a59d-400b-a559-069edd1ffafa-kube-api-access-mndx5\") pod \"fb946573-a59d-400b-a559-069edd1ffafa\" (UID: \"fb946573-a59d-400b-a559-069edd1ffafa\") " Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.720266 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb946573-a59d-400b-a559-069edd1ffafa-utilities" (OuterVolumeSpecName: "utilities") pod "fb946573-a59d-400b-a559-069edd1ffafa" (UID: "fb946573-a59d-400b-a559-069edd1ffafa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.726990 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb946573-a59d-400b-a559-069edd1ffafa-kube-api-access-mndx5" (OuterVolumeSpecName: "kube-api-access-mndx5") pod "fb946573-a59d-400b-a559-069edd1ffafa" (UID: "fb946573-a59d-400b-a559-069edd1ffafa"). InnerVolumeSpecName "kube-api-access-mndx5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.767474 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb946573-a59d-400b-a559-069edd1ffafa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fb946573-a59d-400b-a559-069edd1ffafa" (UID: "fb946573-a59d-400b-a559-069edd1ffafa"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.823204 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb946573-a59d-400b-a559-069edd1ffafa-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.823234 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb946573-a59d-400b-a559-069edd1ffafa-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.823246 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mndx5\" (UniqueName: \"kubernetes.io/projected/fb946573-a59d-400b-a559-069edd1ffafa-kube-api-access-mndx5\") on node \"crc\" DevicePath \"\"" Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.890388 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wmj6h" Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.923827 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zf5ms\" (UniqueName: \"kubernetes.io/projected/e17683d9-a928-4409-ad53-736c7c243d29-kube-api-access-zf5ms\") pod \"e17683d9-a928-4409-ad53-736c7c243d29\" (UID: \"e17683d9-a928-4409-ad53-736c7c243d29\") " Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.924338 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e17683d9-a928-4409-ad53-736c7c243d29-utilities\") pod \"e17683d9-a928-4409-ad53-736c7c243d29\" (UID: \"e17683d9-a928-4409-ad53-736c7c243d29\") " Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.924377 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e17683d9-a928-4409-ad53-736c7c243d29-catalog-content\") pod \"e17683d9-a928-4409-ad53-736c7c243d29\" (UID: \"e17683d9-a928-4409-ad53-736c7c243d29\") " Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.924793 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e17683d9-a928-4409-ad53-736c7c243d29-utilities" (OuterVolumeSpecName: "utilities") pod "e17683d9-a928-4409-ad53-736c7c243d29" (UID: "e17683d9-a928-4409-ad53-736c7c243d29"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.926194 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e17683d9-a928-4409-ad53-736c7c243d29-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.927015 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e17683d9-a928-4409-ad53-736c7c243d29-kube-api-access-zf5ms" (OuterVolumeSpecName: "kube-api-access-zf5ms") pod "e17683d9-a928-4409-ad53-736c7c243d29" (UID: "e17683d9-a928-4409-ad53-736c7c243d29"). InnerVolumeSpecName "kube-api-access-zf5ms". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:35:49 crc kubenswrapper[4644]: I1213 07:35:49.958705 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e17683d9-a928-4409-ad53-736c7c243d29-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e17683d9-a928-4409-ad53-736c7c243d29" (UID: "e17683d9-a928-4409-ad53-736c7c243d29"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.027960 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e17683d9-a928-4409-ad53-736c7c243d29-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.028364 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zf5ms\" (UniqueName: \"kubernetes.io/projected/e17683d9-a928-4409-ad53-736c7c243d29-kube-api-access-zf5ms\") on node \"crc\" DevicePath \"\"" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.152004 4644 generic.go:334] "Generic (PLEG): container finished" podID="e17683d9-a928-4409-ad53-736c7c243d29" containerID="28d78b031efc82ab6667110ff0ef02e0f8b978428b686c8c55ec69b36c0bf506" exitCode=0 Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.152069 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wmj6h" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.152098 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wmj6h" event={"ID":"e17683d9-a928-4409-ad53-736c7c243d29","Type":"ContainerDied","Data":"28d78b031efc82ab6667110ff0ef02e0f8b978428b686c8c55ec69b36c0bf506"} Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.152168 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wmj6h" event={"ID":"e17683d9-a928-4409-ad53-736c7c243d29","Type":"ContainerDied","Data":"ea075e4a9f5bb28a14a03fc2e0a0fd8e9dbad6b6e380979453c95e0ae79aa0b2"} Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.152187 4644 scope.go:117] "RemoveContainer" containerID="28d78b031efc82ab6667110ff0ef02e0f8b978428b686c8c55ec69b36c0bf506" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.155039 4644 generic.go:334] "Generic (PLEG): container finished" podID="fb946573-a59d-400b-a559-069edd1ffafa" containerID="1f86d8a4583181f90329153266400c6781ef1557d37696826884131a1eb68f67" exitCode=0 Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.155125 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zgl5r" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.155197 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgl5r" event={"ID":"fb946573-a59d-400b-a559-069edd1ffafa","Type":"ContainerDied","Data":"1f86d8a4583181f90329153266400c6781ef1557d37696826884131a1eb68f67"} Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.155284 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zgl5r" event={"ID":"fb946573-a59d-400b-a559-069edd1ffafa","Type":"ContainerDied","Data":"61101894ed9f0b02d3168694e1d5809bfdcb23249fa4b466c7769ef22bb0e194"} Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.176766 4644 scope.go:117] "RemoveContainer" containerID="7295c88ce97145e443a515200795bd59fee46e28a29af17f61316adb44e6afae" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.178956 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wmj6h"] Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.185933 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wmj6h"] Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.194109 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zgl5r"] Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.202316 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zgl5r"] Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.206041 4644 scope.go:117] "RemoveContainer" containerID="3a819f98b5347f09d3bb2bd5b518c9763c01c52b607ba1bf72bca80dfaebc2e8" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.222797 4644 scope.go:117] "RemoveContainer" containerID="28d78b031efc82ab6667110ff0ef02e0f8b978428b686c8c55ec69b36c0bf506" Dec 13 07:35:50 crc kubenswrapper[4644]: E1213 07:35:50.223185 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28d78b031efc82ab6667110ff0ef02e0f8b978428b686c8c55ec69b36c0bf506\": container with ID starting with 28d78b031efc82ab6667110ff0ef02e0f8b978428b686c8c55ec69b36c0bf506 not found: ID does not exist" containerID="28d78b031efc82ab6667110ff0ef02e0f8b978428b686c8c55ec69b36c0bf506" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.223216 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28d78b031efc82ab6667110ff0ef02e0f8b978428b686c8c55ec69b36c0bf506"} err="failed to get container status \"28d78b031efc82ab6667110ff0ef02e0f8b978428b686c8c55ec69b36c0bf506\": rpc error: code = NotFound desc = could not find container \"28d78b031efc82ab6667110ff0ef02e0f8b978428b686c8c55ec69b36c0bf506\": container with ID starting with 28d78b031efc82ab6667110ff0ef02e0f8b978428b686c8c55ec69b36c0bf506 not found: ID does not exist" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.223235 4644 scope.go:117] "RemoveContainer" containerID="7295c88ce97145e443a515200795bd59fee46e28a29af17f61316adb44e6afae" Dec 13 07:35:50 crc kubenswrapper[4644]: E1213 07:35:50.223600 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7295c88ce97145e443a515200795bd59fee46e28a29af17f61316adb44e6afae\": container with ID starting with 7295c88ce97145e443a515200795bd59fee46e28a29af17f61316adb44e6afae not found: 
ID does not exist" containerID="7295c88ce97145e443a515200795bd59fee46e28a29af17f61316adb44e6afae" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.223631 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7295c88ce97145e443a515200795bd59fee46e28a29af17f61316adb44e6afae"} err="failed to get container status \"7295c88ce97145e443a515200795bd59fee46e28a29af17f61316adb44e6afae\": rpc error: code = NotFound desc = could not find container \"7295c88ce97145e443a515200795bd59fee46e28a29af17f61316adb44e6afae\": container with ID starting with 7295c88ce97145e443a515200795bd59fee46e28a29af17f61316adb44e6afae not found: ID does not exist" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.223655 4644 scope.go:117] "RemoveContainer" containerID="3a819f98b5347f09d3bb2bd5b518c9763c01c52b607ba1bf72bca80dfaebc2e8" Dec 13 07:35:50 crc kubenswrapper[4644]: E1213 07:35:50.223967 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a819f98b5347f09d3bb2bd5b518c9763c01c52b607ba1bf72bca80dfaebc2e8\": container with ID starting with 3a819f98b5347f09d3bb2bd5b518c9763c01c52b607ba1bf72bca80dfaebc2e8 not found: ID does not exist" containerID="3a819f98b5347f09d3bb2bd5b518c9763c01c52b607ba1bf72bca80dfaebc2e8" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.223999 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a819f98b5347f09d3bb2bd5b518c9763c01c52b607ba1bf72bca80dfaebc2e8"} err="failed to get container status \"3a819f98b5347f09d3bb2bd5b518c9763c01c52b607ba1bf72bca80dfaebc2e8\": rpc error: code = NotFound desc = could not find container \"3a819f98b5347f09d3bb2bd5b518c9763c01c52b607ba1bf72bca80dfaebc2e8\": container with ID starting with 3a819f98b5347f09d3bb2bd5b518c9763c01c52b607ba1bf72bca80dfaebc2e8 not found: ID does not exist" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.224019 4644 scope.go:117] "RemoveContainer" containerID="1f86d8a4583181f90329153266400c6781ef1557d37696826884131a1eb68f67" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.239870 4644 scope.go:117] "RemoveContainer" containerID="a24ed257af60a0097ce9f936e6c6d1db4a439f9cbec35210c9eb1ade81d3810c" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.254050 4644 scope.go:117] "RemoveContainer" containerID="02f42e6ce4897d7e85b2315daf3bdca8170efeb5430c5ce9c30ec2ece99f5e63" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.297781 4644 scope.go:117] "RemoveContainer" containerID="1f86d8a4583181f90329153266400c6781ef1557d37696826884131a1eb68f67" Dec 13 07:35:50 crc kubenswrapper[4644]: E1213 07:35:50.298202 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f86d8a4583181f90329153266400c6781ef1557d37696826884131a1eb68f67\": container with ID starting with 1f86d8a4583181f90329153266400c6781ef1557d37696826884131a1eb68f67 not found: ID does not exist" containerID="1f86d8a4583181f90329153266400c6781ef1557d37696826884131a1eb68f67" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.298265 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f86d8a4583181f90329153266400c6781ef1557d37696826884131a1eb68f67"} err="failed to get container status \"1f86d8a4583181f90329153266400c6781ef1557d37696826884131a1eb68f67\": rpc error: code = NotFound desc = could not find container 
\"1f86d8a4583181f90329153266400c6781ef1557d37696826884131a1eb68f67\": container with ID starting with 1f86d8a4583181f90329153266400c6781ef1557d37696826884131a1eb68f67 not found: ID does not exist" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.298312 4644 scope.go:117] "RemoveContainer" containerID="a24ed257af60a0097ce9f936e6c6d1db4a439f9cbec35210c9eb1ade81d3810c" Dec 13 07:35:50 crc kubenswrapper[4644]: E1213 07:35:50.298854 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a24ed257af60a0097ce9f936e6c6d1db4a439f9cbec35210c9eb1ade81d3810c\": container with ID starting with a24ed257af60a0097ce9f936e6c6d1db4a439f9cbec35210c9eb1ade81d3810c not found: ID does not exist" containerID="a24ed257af60a0097ce9f936e6c6d1db4a439f9cbec35210c9eb1ade81d3810c" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.298891 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a24ed257af60a0097ce9f936e6c6d1db4a439f9cbec35210c9eb1ade81d3810c"} err="failed to get container status \"a24ed257af60a0097ce9f936e6c6d1db4a439f9cbec35210c9eb1ade81d3810c\": rpc error: code = NotFound desc = could not find container \"a24ed257af60a0097ce9f936e6c6d1db4a439f9cbec35210c9eb1ade81d3810c\": container with ID starting with a24ed257af60a0097ce9f936e6c6d1db4a439f9cbec35210c9eb1ade81d3810c not found: ID does not exist" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.298910 4644 scope.go:117] "RemoveContainer" containerID="02f42e6ce4897d7e85b2315daf3bdca8170efeb5430c5ce9c30ec2ece99f5e63" Dec 13 07:35:50 crc kubenswrapper[4644]: E1213 07:35:50.299278 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02f42e6ce4897d7e85b2315daf3bdca8170efeb5430c5ce9c30ec2ece99f5e63\": container with ID starting with 02f42e6ce4897d7e85b2315daf3bdca8170efeb5430c5ce9c30ec2ece99f5e63 not found: ID does not exist" containerID="02f42e6ce4897d7e85b2315daf3bdca8170efeb5430c5ce9c30ec2ece99f5e63" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.299314 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02f42e6ce4897d7e85b2315daf3bdca8170efeb5430c5ce9c30ec2ece99f5e63"} err="failed to get container status \"02f42e6ce4897d7e85b2315daf3bdca8170efeb5430c5ce9c30ec2ece99f5e63\": rpc error: code = NotFound desc = could not find container \"02f42e6ce4897d7e85b2315daf3bdca8170efeb5430c5ce9c30ec2ece99f5e63\": container with ID starting with 02f42e6ce4897d7e85b2315daf3bdca8170efeb5430c5ce9c30ec2ece99f5e63 not found: ID does not exist" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.398798 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e17683d9-a928-4409-ad53-736c7c243d29" path="/var/lib/kubelet/pods/e17683d9-a928-4409-ad53-736c7c243d29/volumes" Dec 13 07:35:50 crc kubenswrapper[4644]: I1213 07:35:50.399717 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb946573-a59d-400b-a559-069edd1ffafa" path="/var/lib/kubelet/pods/fb946573-a59d-400b-a559-069edd1ffafa/volumes" Dec 13 07:35:52 crc kubenswrapper[4644]: I1213 07:35:52.390326 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428" Dec 13 07:35:52 crc kubenswrapper[4644]: E1213 07:35:52.390858 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:35:58 crc kubenswrapper[4644]: I1213 07:35:58.950789 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-tftvv/must-gather-z4kh6"] Dec 13 07:35:58 crc kubenswrapper[4644]: E1213 07:35:58.951539 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb946573-a59d-400b-a559-069edd1ffafa" containerName="registry-server" Dec 13 07:35:58 crc kubenswrapper[4644]: I1213 07:35:58.951551 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb946573-a59d-400b-a559-069edd1ffafa" containerName="registry-server" Dec 13 07:35:58 crc kubenswrapper[4644]: E1213 07:35:58.951571 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e17683d9-a928-4409-ad53-736c7c243d29" containerName="registry-server" Dec 13 07:35:58 crc kubenswrapper[4644]: I1213 07:35:58.951577 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="e17683d9-a928-4409-ad53-736c7c243d29" containerName="registry-server" Dec 13 07:35:58 crc kubenswrapper[4644]: E1213 07:35:58.951592 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb946573-a59d-400b-a559-069edd1ffafa" containerName="extract-content" Dec 13 07:35:58 crc kubenswrapper[4644]: I1213 07:35:58.951597 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb946573-a59d-400b-a559-069edd1ffafa" containerName="extract-content" Dec 13 07:35:58 crc kubenswrapper[4644]: E1213 07:35:58.951607 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e17683d9-a928-4409-ad53-736c7c243d29" containerName="extract-content" Dec 13 07:35:58 crc kubenswrapper[4644]: I1213 07:35:58.951612 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="e17683d9-a928-4409-ad53-736c7c243d29" containerName="extract-content" Dec 13 07:35:58 crc kubenswrapper[4644]: E1213 07:35:58.951622 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e17683d9-a928-4409-ad53-736c7c243d29" containerName="extract-utilities" Dec 13 07:35:58 crc kubenswrapper[4644]: I1213 07:35:58.951628 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="e17683d9-a928-4409-ad53-736c7c243d29" containerName="extract-utilities" Dec 13 07:35:58 crc kubenswrapper[4644]: E1213 07:35:58.951639 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb946573-a59d-400b-a559-069edd1ffafa" containerName="extract-utilities" Dec 13 07:35:58 crc kubenswrapper[4644]: I1213 07:35:58.951645 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb946573-a59d-400b-a559-069edd1ffafa" containerName="extract-utilities" Dec 13 07:35:58 crc kubenswrapper[4644]: I1213 07:35:58.951782 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb946573-a59d-400b-a559-069edd1ffafa" containerName="registry-server" Dec 13 07:35:58 crc kubenswrapper[4644]: I1213 07:35:58.951800 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="e17683d9-a928-4409-ad53-736c7c243d29" containerName="registry-server" Dec 13 07:35:58 crc kubenswrapper[4644]: I1213 07:35:58.952574 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tftvv/must-gather-z4kh6" Dec 13 07:35:58 crc kubenswrapper[4644]: I1213 07:35:58.955050 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-tftvv"/"openshift-service-ca.crt" Dec 13 07:35:58 crc kubenswrapper[4644]: I1213 07:35:58.955185 4644 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-tftvv"/"default-dockercfg-nqpvs" Dec 13 07:35:58 crc kubenswrapper[4644]: I1213 07:35:58.955737 4644 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-tftvv"/"kube-root-ca.crt" Dec 13 07:35:58 crc kubenswrapper[4644]: I1213 07:35:58.973822 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-tftvv/must-gather-z4kh6"] Dec 13 07:35:59 crc kubenswrapper[4644]: I1213 07:35:59.002063 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnlmb\" (UniqueName: \"kubernetes.io/projected/fc6552aa-3834-4872-ac30-8196fd0dc80b-kube-api-access-xnlmb\") pod \"must-gather-z4kh6\" (UID: \"fc6552aa-3834-4872-ac30-8196fd0dc80b\") " pod="openshift-must-gather-tftvv/must-gather-z4kh6" Dec 13 07:35:59 crc kubenswrapper[4644]: I1213 07:35:59.002202 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/fc6552aa-3834-4872-ac30-8196fd0dc80b-must-gather-output\") pod \"must-gather-z4kh6\" (UID: \"fc6552aa-3834-4872-ac30-8196fd0dc80b\") " pod="openshift-must-gather-tftvv/must-gather-z4kh6" Dec 13 07:35:59 crc kubenswrapper[4644]: I1213 07:35:59.104592 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnlmb\" (UniqueName: \"kubernetes.io/projected/fc6552aa-3834-4872-ac30-8196fd0dc80b-kube-api-access-xnlmb\") pod \"must-gather-z4kh6\" (UID: \"fc6552aa-3834-4872-ac30-8196fd0dc80b\") " pod="openshift-must-gather-tftvv/must-gather-z4kh6" Dec 13 07:35:59 crc kubenswrapper[4644]: I1213 07:35:59.104780 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/fc6552aa-3834-4872-ac30-8196fd0dc80b-must-gather-output\") pod \"must-gather-z4kh6\" (UID: \"fc6552aa-3834-4872-ac30-8196fd0dc80b\") " pod="openshift-must-gather-tftvv/must-gather-z4kh6" Dec 13 07:35:59 crc kubenswrapper[4644]: I1213 07:35:59.105176 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/fc6552aa-3834-4872-ac30-8196fd0dc80b-must-gather-output\") pod \"must-gather-z4kh6\" (UID: \"fc6552aa-3834-4872-ac30-8196fd0dc80b\") " pod="openshift-must-gather-tftvv/must-gather-z4kh6" Dec 13 07:35:59 crc kubenswrapper[4644]: I1213 07:35:59.121277 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnlmb\" (UniqueName: \"kubernetes.io/projected/fc6552aa-3834-4872-ac30-8196fd0dc80b-kube-api-access-xnlmb\") pod \"must-gather-z4kh6\" (UID: \"fc6552aa-3834-4872-ac30-8196fd0dc80b\") " pod="openshift-must-gather-tftvv/must-gather-z4kh6" Dec 13 07:35:59 crc kubenswrapper[4644]: I1213 07:35:59.269212 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tftvv/must-gather-z4kh6" Dec 13 07:35:59 crc kubenswrapper[4644]: I1213 07:35:59.677143 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-tftvv/must-gather-z4kh6"] Dec 13 07:36:00 crc kubenswrapper[4644]: I1213 07:36:00.229549 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tftvv/must-gather-z4kh6" event={"ID":"fc6552aa-3834-4872-ac30-8196fd0dc80b","Type":"ContainerStarted","Data":"72a202d47ee6002b0c6c9c20427e4cdc32fb8b350b24b22c771f7f37e389e171"} Dec 13 07:36:03 crc kubenswrapper[4644]: I1213 07:36:03.389513 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428" Dec 13 07:36:03 crc kubenswrapper[4644]: E1213 07:36:03.390366 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:36:05 crc kubenswrapper[4644]: I1213 07:36:05.287019 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tftvv/must-gather-z4kh6" event={"ID":"fc6552aa-3834-4872-ac30-8196fd0dc80b","Type":"ContainerStarted","Data":"1bfa9a7344124006c021081657f55b79f2c2aaf04c88b452ef76dbb1698bee83"} Dec 13 07:36:06 crc kubenswrapper[4644]: I1213 07:36:06.295680 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tftvv/must-gather-z4kh6" event={"ID":"fc6552aa-3834-4872-ac30-8196fd0dc80b","Type":"ContainerStarted","Data":"83335dcb2c9c084900b6cea6d3e4e181606501165f1986ee83eee3208a5ba719"} Dec 13 07:36:06 crc kubenswrapper[4644]: I1213 07:36:06.308169 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-tftvv/must-gather-z4kh6" podStartSLOduration=3.078996271 podStartE2EDuration="8.308149066s" podCreationTimestamp="2025-12-13 07:35:58 +0000 UTC" firstStartedPulling="2025-12-13 07:35:59.679633107 +0000 UTC m=+3021.894583940" lastFinishedPulling="2025-12-13 07:36:04.908785902 +0000 UTC m=+3027.123736735" observedRunningTime="2025-12-13 07:36:06.307648084 +0000 UTC m=+3028.522598917" watchObservedRunningTime="2025-12-13 07:36:06.308149066 +0000 UTC m=+3028.523099898" Dec 13 07:36:08 crc kubenswrapper[4644]: I1213 07:36:08.747077 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-tftvv/crc-debug-gxlp9"] Dec 13 07:36:08 crc kubenswrapper[4644]: I1213 07:36:08.748737 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tftvv/crc-debug-gxlp9" Dec 13 07:36:08 crc kubenswrapper[4644]: I1213 07:36:08.858329 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlbds\" (UniqueName: \"kubernetes.io/projected/339cdeae-4efc-4747-a0d0-f833eb4dccf9-kube-api-access-qlbds\") pod \"crc-debug-gxlp9\" (UID: \"339cdeae-4efc-4747-a0d0-f833eb4dccf9\") " pod="openshift-must-gather-tftvv/crc-debug-gxlp9" Dec 13 07:36:08 crc kubenswrapper[4644]: I1213 07:36:08.858669 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/339cdeae-4efc-4747-a0d0-f833eb4dccf9-host\") pod \"crc-debug-gxlp9\" (UID: \"339cdeae-4efc-4747-a0d0-f833eb4dccf9\") " pod="openshift-must-gather-tftvv/crc-debug-gxlp9" Dec 13 07:36:08 crc kubenswrapper[4644]: I1213 07:36:08.960819 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlbds\" (UniqueName: \"kubernetes.io/projected/339cdeae-4efc-4747-a0d0-f833eb4dccf9-kube-api-access-qlbds\") pod \"crc-debug-gxlp9\" (UID: \"339cdeae-4efc-4747-a0d0-f833eb4dccf9\") " pod="openshift-must-gather-tftvv/crc-debug-gxlp9" Dec 13 07:36:08 crc kubenswrapper[4644]: I1213 07:36:08.961011 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/339cdeae-4efc-4747-a0d0-f833eb4dccf9-host\") pod \"crc-debug-gxlp9\" (UID: \"339cdeae-4efc-4747-a0d0-f833eb4dccf9\") " pod="openshift-must-gather-tftvv/crc-debug-gxlp9" Dec 13 07:36:08 crc kubenswrapper[4644]: I1213 07:36:08.961088 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/339cdeae-4efc-4747-a0d0-f833eb4dccf9-host\") pod \"crc-debug-gxlp9\" (UID: \"339cdeae-4efc-4747-a0d0-f833eb4dccf9\") " pod="openshift-must-gather-tftvv/crc-debug-gxlp9" Dec 13 07:36:08 crc kubenswrapper[4644]: I1213 07:36:08.992815 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlbds\" (UniqueName: \"kubernetes.io/projected/339cdeae-4efc-4747-a0d0-f833eb4dccf9-kube-api-access-qlbds\") pod \"crc-debug-gxlp9\" (UID: \"339cdeae-4efc-4747-a0d0-f833eb4dccf9\") " pod="openshift-must-gather-tftvv/crc-debug-gxlp9" Dec 13 07:36:09 crc kubenswrapper[4644]: I1213 07:36:09.070419 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tftvv/crc-debug-gxlp9" Dec 13 07:36:09 crc kubenswrapper[4644]: W1213 07:36:09.096215 4644 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod339cdeae_4efc_4747_a0d0_f833eb4dccf9.slice/crio-feb673028938b870b7b3bad6231ccc7891ded176c65235bc728b8c9f2d95f2df WatchSource:0}: Error finding container feb673028938b870b7b3bad6231ccc7891ded176c65235bc728b8c9f2d95f2df: Status 404 returned error can't find the container with id feb673028938b870b7b3bad6231ccc7891ded176c65235bc728b8c9f2d95f2df Dec 13 07:36:09 crc kubenswrapper[4644]: I1213 07:36:09.323830 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tftvv/crc-debug-gxlp9" event={"ID":"339cdeae-4efc-4747-a0d0-f833eb4dccf9","Type":"ContainerStarted","Data":"feb673028938b870b7b3bad6231ccc7891ded176c65235bc728b8c9f2d95f2df"} Dec 13 07:36:15 crc kubenswrapper[4644]: I1213 07:36:15.389732 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428" Dec 13 07:36:15 crc kubenswrapper[4644]: E1213 07:36:15.390629 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:36:19 crc kubenswrapper[4644]: I1213 07:36:19.425868 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tftvv/crc-debug-gxlp9" event={"ID":"339cdeae-4efc-4747-a0d0-f833eb4dccf9","Type":"ContainerStarted","Data":"cd485211738a737243f666eee565e025ad7f090f84b56a455ddb9f68cfa598e8"} Dec 13 07:36:19 crc kubenswrapper[4644]: I1213 07:36:19.442218 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-tftvv/crc-debug-gxlp9" podStartSLOduration=2.008384236 podStartE2EDuration="11.442202498s" podCreationTimestamp="2025-12-13 07:36:08 +0000 UTC" firstStartedPulling="2025-12-13 07:36:09.098387303 +0000 UTC m=+3031.313338136" lastFinishedPulling="2025-12-13 07:36:18.532205565 +0000 UTC m=+3040.747156398" observedRunningTime="2025-12-13 07:36:19.441697647 +0000 UTC m=+3041.656648481" watchObservedRunningTime="2025-12-13 07:36:19.442202498 +0000 UTC m=+3041.657153330" Dec 13 07:36:26 crc kubenswrapper[4644]: I1213 07:36:26.389189 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428" Dec 13 07:36:26 crc kubenswrapper[4644]: E1213 07:36:26.390092 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:36:36 crc kubenswrapper[4644]: I1213 07:36:36.559720 4644 generic.go:334] "Generic (PLEG): container finished" podID="339cdeae-4efc-4747-a0d0-f833eb4dccf9" containerID="cd485211738a737243f666eee565e025ad7f090f84b56a455ddb9f68cfa598e8" exitCode=0 Dec 13 07:36:36 crc kubenswrapper[4644]: I1213 07:36:36.560128 
4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tftvv/crc-debug-gxlp9" event={"ID":"339cdeae-4efc-4747-a0d0-f833eb4dccf9","Type":"ContainerDied","Data":"cd485211738a737243f666eee565e025ad7f090f84b56a455ddb9f68cfa598e8"} Dec 13 07:36:37 crc kubenswrapper[4644]: I1213 07:36:37.389637 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428" Dec 13 07:36:37 crc kubenswrapper[4644]: E1213 07:36:37.390316 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:36:37 crc kubenswrapper[4644]: I1213 07:36:37.654172 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tftvv/crc-debug-gxlp9" Dec 13 07:36:37 crc kubenswrapper[4644]: I1213 07:36:37.680676 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-tftvv/crc-debug-gxlp9"] Dec 13 07:36:37 crc kubenswrapper[4644]: I1213 07:36:37.686369 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-tftvv/crc-debug-gxlp9"] Dec 13 07:36:37 crc kubenswrapper[4644]: I1213 07:36:37.711466 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/339cdeae-4efc-4747-a0d0-f833eb4dccf9-host\") pod \"339cdeae-4efc-4747-a0d0-f833eb4dccf9\" (UID: \"339cdeae-4efc-4747-a0d0-f833eb4dccf9\") " Dec 13 07:36:37 crc kubenswrapper[4644]: I1213 07:36:37.711568 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlbds\" (UniqueName: \"kubernetes.io/projected/339cdeae-4efc-4747-a0d0-f833eb4dccf9-kube-api-access-qlbds\") pod \"339cdeae-4efc-4747-a0d0-f833eb4dccf9\" (UID: \"339cdeae-4efc-4747-a0d0-f833eb4dccf9\") " Dec 13 07:36:37 crc kubenswrapper[4644]: I1213 07:36:37.711623 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/339cdeae-4efc-4747-a0d0-f833eb4dccf9-host" (OuterVolumeSpecName: "host") pod "339cdeae-4efc-4747-a0d0-f833eb4dccf9" (UID: "339cdeae-4efc-4747-a0d0-f833eb4dccf9"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:36:37 crc kubenswrapper[4644]: I1213 07:36:37.712208 4644 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/339cdeae-4efc-4747-a0d0-f833eb4dccf9-host\") on node \"crc\" DevicePath \"\"" Dec 13 07:36:37 crc kubenswrapper[4644]: I1213 07:36:37.719576 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/339cdeae-4efc-4747-a0d0-f833eb4dccf9-kube-api-access-qlbds" (OuterVolumeSpecName: "kube-api-access-qlbds") pod "339cdeae-4efc-4747-a0d0-f833eb4dccf9" (UID: "339cdeae-4efc-4747-a0d0-f833eb4dccf9"). InnerVolumeSpecName "kube-api-access-qlbds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:36:37 crc kubenswrapper[4644]: I1213 07:36:37.813870 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlbds\" (UniqueName: \"kubernetes.io/projected/339cdeae-4efc-4747-a0d0-f833eb4dccf9-kube-api-access-qlbds\") on node \"crc\" DevicePath \"\"" Dec 13 07:36:38 crc kubenswrapper[4644]: I1213 07:36:38.399340 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="339cdeae-4efc-4747-a0d0-f833eb4dccf9" path="/var/lib/kubelet/pods/339cdeae-4efc-4747-a0d0-f833eb4dccf9/volumes" Dec 13 07:36:38 crc kubenswrapper[4644]: I1213 07:36:38.577401 4644 scope.go:117] "RemoveContainer" containerID="cd485211738a737243f666eee565e025ad7f090f84b56a455ddb9f68cfa598e8" Dec 13 07:36:38 crc kubenswrapper[4644]: I1213 07:36:38.577596 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tftvv/crc-debug-gxlp9" Dec 13 07:36:38 crc kubenswrapper[4644]: I1213 07:36:38.860435 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-tftvv/crc-debug-m9vv8"] Dec 13 07:36:38 crc kubenswrapper[4644]: E1213 07:36:38.860767 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="339cdeae-4efc-4747-a0d0-f833eb4dccf9" containerName="container-00" Dec 13 07:36:38 crc kubenswrapper[4644]: I1213 07:36:38.860779 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="339cdeae-4efc-4747-a0d0-f833eb4dccf9" containerName="container-00" Dec 13 07:36:38 crc kubenswrapper[4644]: I1213 07:36:38.860930 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="339cdeae-4efc-4747-a0d0-f833eb4dccf9" containerName="container-00" Dec 13 07:36:38 crc kubenswrapper[4644]: I1213 07:36:38.861436 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tftvv/crc-debug-m9vv8" Dec 13 07:36:38 crc kubenswrapper[4644]: I1213 07:36:38.951700 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a0a078e0-da55-4798-973b-2cc8b6a09c90-host\") pod \"crc-debug-m9vv8\" (UID: \"a0a078e0-da55-4798-973b-2cc8b6a09c90\") " pod="openshift-must-gather-tftvv/crc-debug-m9vv8" Dec 13 07:36:38 crc kubenswrapper[4644]: I1213 07:36:38.952413 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nd7qj\" (UniqueName: \"kubernetes.io/projected/a0a078e0-da55-4798-973b-2cc8b6a09c90-kube-api-access-nd7qj\") pod \"crc-debug-m9vv8\" (UID: \"a0a078e0-da55-4798-973b-2cc8b6a09c90\") " pod="openshift-must-gather-tftvv/crc-debug-m9vv8" Dec 13 07:36:39 crc kubenswrapper[4644]: I1213 07:36:39.055360 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nd7qj\" (UniqueName: \"kubernetes.io/projected/a0a078e0-da55-4798-973b-2cc8b6a09c90-kube-api-access-nd7qj\") pod \"crc-debug-m9vv8\" (UID: \"a0a078e0-da55-4798-973b-2cc8b6a09c90\") " pod="openshift-must-gather-tftvv/crc-debug-m9vv8" Dec 13 07:36:39 crc kubenswrapper[4644]: I1213 07:36:39.055786 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a0a078e0-da55-4798-973b-2cc8b6a09c90-host\") pod \"crc-debug-m9vv8\" (UID: \"a0a078e0-da55-4798-973b-2cc8b6a09c90\") " pod="openshift-must-gather-tftvv/crc-debug-m9vv8" Dec 13 07:36:39 crc kubenswrapper[4644]: I1213 07:36:39.055872 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a0a078e0-da55-4798-973b-2cc8b6a09c90-host\") pod \"crc-debug-m9vv8\" (UID: \"a0a078e0-da55-4798-973b-2cc8b6a09c90\") " pod="openshift-must-gather-tftvv/crc-debug-m9vv8" Dec 13 07:36:39 crc kubenswrapper[4644]: I1213 07:36:39.073874 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nd7qj\" (UniqueName: \"kubernetes.io/projected/a0a078e0-da55-4798-973b-2cc8b6a09c90-kube-api-access-nd7qj\") pod \"crc-debug-m9vv8\" (UID: \"a0a078e0-da55-4798-973b-2cc8b6a09c90\") " pod="openshift-must-gather-tftvv/crc-debug-m9vv8" Dec 13 07:36:39 crc kubenswrapper[4644]: I1213 07:36:39.175623 4644 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tftvv/crc-debug-m9vv8" Dec 13 07:36:39 crc kubenswrapper[4644]: I1213 07:36:39.587893 4644 generic.go:334] "Generic (PLEG): container finished" podID="a0a078e0-da55-4798-973b-2cc8b6a09c90" containerID="9fd4f62d186484d34377d5c3c504d811713b8c12728c869c512add6e320901a5" exitCode=1 Dec 13 07:36:39 crc kubenswrapper[4644]: I1213 07:36:39.587978 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tftvv/crc-debug-m9vv8" event={"ID":"a0a078e0-da55-4798-973b-2cc8b6a09c90","Type":"ContainerDied","Data":"9fd4f62d186484d34377d5c3c504d811713b8c12728c869c512add6e320901a5"} Dec 13 07:36:39 crc kubenswrapper[4644]: I1213 07:36:39.588248 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tftvv/crc-debug-m9vv8" event={"ID":"a0a078e0-da55-4798-973b-2cc8b6a09c90","Type":"ContainerStarted","Data":"50446121d8449d61dbcfb99f84bff0ec29ae322ecd5c7061b315681866359a48"} Dec 13 07:36:39 crc kubenswrapper[4644]: I1213 07:36:39.618468 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-tftvv/crc-debug-m9vv8"] Dec 13 07:36:39 crc kubenswrapper[4644]: I1213 07:36:39.628611 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-tftvv/crc-debug-m9vv8"] Dec 13 07:36:40 crc kubenswrapper[4644]: I1213 07:36:40.674141 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tftvv/crc-debug-m9vv8" Dec 13 07:36:40 crc kubenswrapper[4644]: I1213 07:36:40.695035 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a0a078e0-da55-4798-973b-2cc8b6a09c90-host\") pod \"a0a078e0-da55-4798-973b-2cc8b6a09c90\" (UID: \"a0a078e0-da55-4798-973b-2cc8b6a09c90\") " Dec 13 07:36:40 crc kubenswrapper[4644]: I1213 07:36:40.695166 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a0a078e0-da55-4798-973b-2cc8b6a09c90-host" (OuterVolumeSpecName: "host") pod "a0a078e0-da55-4798-973b-2cc8b6a09c90" (UID: "a0a078e0-da55-4798-973b-2cc8b6a09c90"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:36:40 crc kubenswrapper[4644]: I1213 07:36:40.695196 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nd7qj\" (UniqueName: \"kubernetes.io/projected/a0a078e0-da55-4798-973b-2cc8b6a09c90-kube-api-access-nd7qj\") pod \"a0a078e0-da55-4798-973b-2cc8b6a09c90\" (UID: \"a0a078e0-da55-4798-973b-2cc8b6a09c90\") " Dec 13 07:36:40 crc kubenswrapper[4644]: I1213 07:36:40.695817 4644 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a0a078e0-da55-4798-973b-2cc8b6a09c90-host\") on node \"crc\" DevicePath \"\"" Dec 13 07:36:40 crc kubenswrapper[4644]: I1213 07:36:40.700807 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0a078e0-da55-4798-973b-2cc8b6a09c90-kube-api-access-nd7qj" (OuterVolumeSpecName: "kube-api-access-nd7qj") pod "a0a078e0-da55-4798-973b-2cc8b6a09c90" (UID: "a0a078e0-da55-4798-973b-2cc8b6a09c90"). InnerVolumeSpecName "kube-api-access-nd7qj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:36:40 crc kubenswrapper[4644]: I1213 07:36:40.798135 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nd7qj\" (UniqueName: \"kubernetes.io/projected/a0a078e0-da55-4798-973b-2cc8b6a09c90-kube-api-access-nd7qj\") on node \"crc\" DevicePath \"\"" Dec 13 07:36:41 crc kubenswrapper[4644]: I1213 07:36:41.605957 4644 scope.go:117] "RemoveContainer" containerID="9fd4f62d186484d34377d5c3c504d811713b8c12728c869c512add6e320901a5" Dec 13 07:36:41 crc kubenswrapper[4644]: I1213 07:36:41.606015 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tftvv/crc-debug-m9vv8" Dec 13 07:36:42 crc kubenswrapper[4644]: I1213 07:36:42.401476 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0a078e0-da55-4798-973b-2cc8b6a09c90" path="/var/lib/kubelet/pods/a0a078e0-da55-4798-973b-2cc8b6a09c90/volumes" Dec 13 07:36:45 crc kubenswrapper[4644]: E1213 07:36:45.117210 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod339cdeae_4efc_4747_a0d0_f833eb4dccf9.slice\": RecentStats: unable to find data in memory cache]" Dec 13 07:36:50 crc kubenswrapper[4644]: I1213 07:36:50.390272 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428" Dec 13 07:36:50 crc kubenswrapper[4644]: E1213 07:36:50.391099 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:36:54 crc kubenswrapper[4644]: E1213 07:36:54.397922 4644 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca: pinging container registry 38.129.56.153:5001: Get \"http://38.129.56.153:5001/v2/\": dial tcp 38.129.56.153:5001: i/o timeout" image="38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca" Dec 13 07:36:54 crc kubenswrapper[4644]: E1213 07:36:54.398625 4644 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca: pinging container registry 38.129.56.153:5001: Get \"http://38.129.56.153:5001/v2/\": dial tcp 38.129.56.153:5001: i/o timeout" image="38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca" Dec 13 07:36:54 crc kubenswrapper[4644]: E1213 07:36:54.398883 4644 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kvtgd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-9fc9c756c-8sjtq_openstack-operators(511296bd-fff8-49c1-bbfd-b702905f6e83): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca: pinging container registry 38.129.56.153:5001: Get \"http://38.129.56.153:5001/v2/\": dial tcp 38.129.56.153:5001: i/o timeout" logger="UnhandledError" Dec 13 07:36:54 crc kubenswrapper[4644]: E1213 07:36:54.399984 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca: pinging container registry 38.129.56.153:5001: Get \\\"http://38.129.56.153:5001/v2/\\\": dial tcp 38.129.56.153:5001: i/o timeout\"" pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" podUID="511296bd-fff8-49c1-bbfd-b702905f6e83" Dec 13 07:36:55 crc kubenswrapper[4644]: E1213 07:36:55.330512 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod339cdeae_4efc_4747_a0d0_f833eb4dccf9.slice\": RecentStats: unable to find data in memory cache]" Dec 13 07:37:03 crc kubenswrapper[4644]: I1213 07:37:03.389111 4644 scope.go:117] "RemoveContainer" 
containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428" Dec 13 07:37:03 crc kubenswrapper[4644]: E1213 07:37:03.389778 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:37:05 crc kubenswrapper[4644]: E1213 07:37:05.554466 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod339cdeae_4efc_4747_a0d0_f833eb4dccf9.slice\": RecentStats: unable to find data in memory cache]" Dec 13 07:37:06 crc kubenswrapper[4644]: E1213 07:37:06.392174 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca\\\"\"" pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" podUID="511296bd-fff8-49c1-bbfd-b702905f6e83" Dec 13 07:37:15 crc kubenswrapper[4644]: E1213 07:37:15.760197 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod339cdeae_4efc_4747_a0d0_f833eb4dccf9.slice\": RecentStats: unable to find data in memory cache]" Dec 13 07:37:17 crc kubenswrapper[4644]: I1213 07:37:17.389285 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428" Dec 13 07:37:17 crc kubenswrapper[4644]: E1213 07:37:17.389813 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:37:25 crc kubenswrapper[4644]: E1213 07:37:25.956841 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod339cdeae_4efc_4747_a0d0_f833eb4dccf9.slice\": RecentStats: unable to find data in memory cache]" Dec 13 07:37:27 crc kubenswrapper[4644]: I1213 07:37:27.430495 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6cb6c55fb8-zbmvj_2806b62c-cd1d-4e9f-97be-70e9129ce932/barbican-api/0.log" Dec 13 07:37:27 crc kubenswrapper[4644]: I1213 07:37:27.486844 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6cb6c55fb8-zbmvj_2806b62c-cd1d-4e9f-97be-70e9129ce932/barbican-api-log/0.log" Dec 13 07:37:27 crc kubenswrapper[4644]: I1213 07:37:27.591934 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-8444d897db-7tb2t_e8c53ee2-0266-496a-858d-20f994ce474e/barbican-keystone-listener/0.log" Dec 13 07:37:27 crc kubenswrapper[4644]: I1213 07:37:27.599989 4644 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-keystone-listener-8444d897db-7tb2t_e8c53ee2-0266-496a-858d-20f994ce474e/barbican-keystone-listener-log/0.log" Dec 13 07:37:27 crc kubenswrapper[4644]: I1213 07:37:27.729381 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-56cc649647-pvdcb_d2f4d27c-142a-48a0-bbc2-28b24c27f8e3/barbican-worker/0.log" Dec 13 07:37:27 crc kubenswrapper[4644]: I1213 07:37:27.763254 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-56cc649647-pvdcb_d2f4d27c-142a-48a0-bbc2-28b24c27f8e3/barbican-worker-log/0.log" Dec 13 07:37:27 crc kubenswrapper[4644]: I1213 07:37:27.851239 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-xs4m2_bfce09ea-ec98-4fd2-a9ee-d9685f5215c3/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 07:37:27 crc kubenswrapper[4644]: I1213 07:37:27.947743 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_17956119-da29-422c-b808-c0894731283e/ceilometer-central-agent/0.log" Dec 13 07:37:28 crc kubenswrapper[4644]: I1213 07:37:28.005615 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_17956119-da29-422c-b808-c0894731283e/ceilometer-notification-agent/0.log" Dec 13 07:37:28 crc kubenswrapper[4644]: I1213 07:37:28.024593 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_17956119-da29-422c-b808-c0894731283e/proxy-httpd/0.log" Dec 13 07:37:28 crc kubenswrapper[4644]: I1213 07:37:28.056182 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_17956119-da29-422c-b808-c0894731283e/sg-core/0.log" Dec 13 07:37:28 crc kubenswrapper[4644]: I1213 07:37:28.157196 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-edpm-deployment-openstack-edpm-ipam-rvqj2_ce2ca357-6142-458b-8b5c-0f722a3a7d86/ceph-client-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 07:37:28 crc kubenswrapper[4644]: I1213 07:37:28.228107 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-zhwxb_f90586e1-30df-422c-b8c7-fdd4fac3112b/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 07:37:28 crc kubenswrapper[4644]: I1213 07:37:28.347826 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_a21196d3-f0e1-419a-9180-e57eaa042592/cinder-api/0.log" Dec 13 07:37:28 crc kubenswrapper[4644]: I1213 07:37:28.383472 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_a21196d3-f0e1-419a-9180-e57eaa042592/cinder-api-log/0.log" Dec 13 07:37:28 crc kubenswrapper[4644]: I1213 07:37:28.393698 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428" Dec 13 07:37:28 crc kubenswrapper[4644]: E1213 07:37:28.394023 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" Dec 13 07:37:28 crc kubenswrapper[4644]: I1213 07:37:28.571868 4644 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_cinder-backup-0_45f91b2c-2718-493d-815e-9f9d2f763143/probe/0.log" Dec 13 07:37:28 crc kubenswrapper[4644]: I1213 07:37:28.595853 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_45f91b2c-2718-493d-815e-9f9d2f763143/cinder-backup/0.log" Dec 13 07:37:28 crc kubenswrapper[4644]: I1213 07:37:28.685571 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_a40ebe98-ffe1-4a0f-8d7c-0f803ed45669/cinder-scheduler/0.log" Dec 13 07:37:28 crc kubenswrapper[4644]: I1213 07:37:28.731033 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_a40ebe98-ffe1-4a0f-8d7c-0f803ed45669/probe/0.log" Dec 13 07:37:28 crc kubenswrapper[4644]: I1213 07:37:28.832172 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_aa86065a-5362-49a8-bc90-ea0a8495e132/cinder-volume/0.log" Dec 13 07:37:28 crc kubenswrapper[4644]: I1213 07:37:28.851468 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_aa86065a-5362-49a8-bc90-ea0a8495e132/probe/0.log" Dec 13 07:37:28 crc kubenswrapper[4644]: I1213 07:37:28.936586 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-n7jtf_66298b75-b28a-4f33-9507-9f8ec6bb4079/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 07:37:29 crc kubenswrapper[4644]: I1213 07:37:29.024831 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-4wr4p_61d39411-4b48-4ea1-b9f9-aa161d05ca46/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 13 07:37:29 crc kubenswrapper[4644]: I1213 07:37:29.103431 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67d475fdcf-drmz6_336b6941-d78a-455f-8b49-60dc81de435a/init/0.log" Dec 13 07:37:29 crc kubenswrapper[4644]: I1213 07:37:29.256532 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67d475fdcf-drmz6_336b6941-d78a-455f-8b49-60dc81de435a/init/0.log" Dec 13 07:37:29 crc kubenswrapper[4644]: I1213 07:37:29.293577 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3/glance-httpd/0.log" Dec 13 07:37:29 crc kubenswrapper[4644]: I1213 07:37:29.295942 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-67d475fdcf-drmz6_336b6941-d78a-455f-8b49-60dc81de435a/dnsmasq-dns/0.log" Dec 13 07:37:29 crc kubenswrapper[4644]: I1213 07:37:29.431924 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_7a7a4f6d-247d-4d1c-9eb2-9a43ddd831b3/glance-log/0.log" Dec 13 07:37:29 crc kubenswrapper[4644]: I1213 07:37:29.437145 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_d5415b9a-c4c6-4b46-beb9-6bc0fed2e723/glance-httpd/0.log" Dec 13 07:37:29 crc kubenswrapper[4644]: I1213 07:37:29.476859 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_d5415b9a-c4c6-4b46-beb9-6bc0fed2e723/glance-log/0.log" Dec 13 07:37:29 crc kubenswrapper[4644]: I1213 07:37:29.668310 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-668dddc65b-wlzwz_b7d37a2f-8117-4d49-8e28-f06339a276cf/horizon/0.log" Dec 13 07:37:29 crc kubenswrapper[4644]: I1213 
07:37:29.818699 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-f8n54_251f4f31-9a39-4fd0-a492-09a3ffa3ce11/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 13 07:37:29 crc kubenswrapper[4644]: I1213 07:37:29.869028 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-668dddc65b-wlzwz_b7d37a2f-8117-4d49-8e28-f06339a276cf/horizon-log/0.log"
Dec 13 07:37:29 crc kubenswrapper[4644]: I1213 07:37:29.936709 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-9zlvx_1cd50117-b865-4351-a364-a283893634ce/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 13 07:37:30 crc kubenswrapper[4644]: I1213 07:37:30.095202 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_316f5e12-4172-4f69-8aac-ec24edf8e012/kube-state-metrics/0.log"
Dec 13 07:37:30 crc kubenswrapper[4644]: I1213 07:37:30.112728 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-84867c6db9-t2k7k_17cae40f-e78c-4b8b-8e5b-3cf6548c1cfa/keystone-api/0.log"
Dec 13 07:37:30 crc kubenswrapper[4644]: I1213 07:37:30.273834 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-ts6vw_51c69e69-78ac-4480-8007-1b306d4ef7bf/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 13 07:37:30 crc kubenswrapper[4644]: I1213 07:37:30.309121 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-a410-account-create-update-6z7z6_d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5/mariadb-account-create-update/0.log"
Dec 13 07:37:30 crc kubenswrapper[4644]: I1213 07:37:30.505771 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_6564caaf-8916-4f38-9bfd-c70c46b28887/manila-api-log/0.log"
Dec 13 07:37:30 crc kubenswrapper[4644]: I1213 07:37:30.511177 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_6564caaf-8916-4f38-9bfd-c70c46b28887/manila-api/0.log"
Dec 13 07:37:30 crc kubenswrapper[4644]: I1213 07:37:30.593990 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-db-create-zkrcn_6c34af67-d12c-49be-beed-0f83e5faa134/mariadb-database-create/0.log"
Dec 13 07:37:30 crc kubenswrapper[4644]: I1213 07:37:30.661853 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-db-sync-6vs42_d158e5b2-13e4-4276-aba2-d5f061a842af/manila-db-sync/0.log"
Dec 13 07:37:30 crc kubenswrapper[4644]: I1213 07:37:30.779320 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_2c69c54f-9157-4483-9cd6-3fe4a4035b75/manila-scheduler/0.log"
Dec 13 07:37:30 crc kubenswrapper[4644]: I1213 07:37:30.782071 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_2c69c54f-9157-4483-9cd6-3fe4a4035b75/probe/0.log"
Dec 13 07:37:30 crc kubenswrapper[4644]: I1213 07:37:30.876787 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc/probe/0.log"
Dec 13 07:37:30 crc kubenswrapper[4644]: I1213 07:37:30.898417 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_8f44f3c6-c98c-4c26-bd47-9a0bf1cf2edc/manila-share/0.log"
Dec 13 07:37:31 crc kubenswrapper[4644]: I1213 07:37:31.104628 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-589fc5f8d9-v292l_b130b8ec-1bd6-4b3f-975e-82f6d903da76/neutron-httpd/0.log"
Dec 13 07:37:31 crc kubenswrapper[4644]: I1213 07:37:31.114794 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-589fc5f8d9-v292l_b130b8ec-1bd6-4b3f-975e-82f6d903da76/neutron-api/0.log"
Dec 13 07:37:31 crc kubenswrapper[4644]: I1213 07:37:31.512964 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-b4jj5_08e5b710-aa24-40c9-9bd6-6d03543cc7b7/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 13 07:37:31 crc kubenswrapper[4644]: I1213 07:37:31.738818 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_0b5d2de7-5524-41a6-a24c-8af21f5a01f0/nova-api-log/0.log"
Dec 13 07:37:31 crc kubenswrapper[4644]: I1213 07:37:31.797331 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_0b5d2de7-5524-41a6-a24c-8af21f5a01f0/nova-api-api/0.log"
Dec 13 07:37:31 crc kubenswrapper[4644]: I1213 07:37:31.801411 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_087d15a1-0ac0-4c1b-a301-280be2b50018/nova-cell0-conductor-conductor/0.log"
Dec 13 07:37:31 crc kubenswrapper[4644]: I1213 07:37:31.993935 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_d4364181-c18b-438a-8f4c-3b44b907d2ae/nova-cell1-conductor-conductor/0.log"
Dec 13 07:37:32 crc kubenswrapper[4644]: I1213 07:37:32.065563 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_12464868-cccc-417b-b431-4cd8e1317137/nova-cell1-novncproxy-novncproxy/0.log"
Dec 13 07:37:32 crc kubenswrapper[4644]: I1213 07:37:32.255824 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-w8nk6_f123c7e1-05b6-4a07-a7e9-b9d0cf90792c/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 13 07:37:32 crc kubenswrapper[4644]: I1213 07:37:32.400653 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_bf31dcb5-e753-4f68-aca2-39815bdf203d/nova-metadata-log/0.log"
Dec 13 07:37:32 crc kubenswrapper[4644]: I1213 07:37:32.652040 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_3eec27e2-843c-4cb9-bf6a-4c4c01cbec9c/nova-scheduler-scheduler/0.log"
Dec 13 07:37:32 crc kubenswrapper[4644]: I1213 07:37:32.698768 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e8e96f9f-4c53-4df2-b818-c3341709594f/mysql-bootstrap/0.log"
Dec 13 07:37:32 crc kubenswrapper[4644]: I1213 07:37:32.868417 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e8e96f9f-4c53-4df2-b818-c3341709594f/mysql-bootstrap/0.log"
Dec 13 07:37:32 crc kubenswrapper[4644]: I1213 07:37:32.943933 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e8e96f9f-4c53-4df2-b818-c3341709594f/galera/0.log"
Dec 13 07:37:33 crc kubenswrapper[4644]: I1213 07:37:33.108196 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_8a194627-4be2-4f29-ab01-c84d89070739/mysql-bootstrap/0.log"
Dec 13 07:37:33 crc kubenswrapper[4644]: I1213 07:37:33.293836 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_8a194627-4be2-4f29-ab01-c84d89070739/galera/0.log"
Dec 13 07:37:33 crc kubenswrapper[4644]: I1213 07:37:33.312655 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_8a194627-4be2-4f29-ab01-c84d89070739/mysql-bootstrap/0.log"
Dec 13 07:37:33 crc kubenswrapper[4644]: I1213 07:37:33.380008 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_bf31dcb5-e753-4f68-aca2-39815bdf203d/nova-metadata-metadata/0.log"
Dec 13 07:37:33 crc kubenswrapper[4644]: I1213 07:37:33.469753 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_4ff37ee4-af22-4e5c-9386-e117905d1faf/openstackclient/0.log"
Dec 13 07:37:33 crc kubenswrapper[4644]: I1213 07:37:33.542062 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-dpzsf_1cb5b56c-b83a-4d43-b1fa-2a88580eceea/openstack-network-exporter/0.log"
Dec 13 07:37:33 crc kubenswrapper[4644]: I1213 07:37:33.658037 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-cb7bh_0d73b937-2604-4e87-867b-acdc34d21e5e/ovsdb-server-init/0.log"
Dec 13 07:37:33 crc kubenswrapper[4644]: I1213 07:37:33.895657 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-cb7bh_0d73b937-2604-4e87-867b-acdc34d21e5e/ovs-vswitchd/0.log"
Dec 13 07:37:33 crc kubenswrapper[4644]: I1213 07:37:33.900772 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-cb7bh_0d73b937-2604-4e87-867b-acdc34d21e5e/ovsdb-server-init/0.log"
Dec 13 07:37:33 crc kubenswrapper[4644]: I1213 07:37:33.907175 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-cb7bh_0d73b937-2604-4e87-867b-acdc34d21e5e/ovsdb-server/0.log"
Dec 13 07:37:34 crc kubenswrapper[4644]: I1213 07:37:34.129046 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-jqzfh_c8ae022b-1ddd-4d25-b060-b973b7925fb4/ovn-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 13 07:37:34 crc kubenswrapper[4644]: I1213 07:37:34.155373 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-txrxh_6e2e749f-da1d-4692-9479-275cd00bc4b0/ovn-controller/0.log"
Dec 13 07:37:34 crc kubenswrapper[4644]: I1213 07:37:34.329069 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_200e0c3e-a409-4cad-8ed3-de1f4f209091/ovn-northd/0.log"
Dec 13 07:37:34 crc kubenswrapper[4644]: I1213 07:37:34.331572 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_200e0c3e-a409-4cad-8ed3-de1f4f209091/openstack-network-exporter/0.log"
Dec 13 07:37:34 crc kubenswrapper[4644]: I1213 07:37:34.371313 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_40233f71-8156-4715-adce-ef3fee2102e2/openstack-network-exporter/0.log"
Dec 13 07:37:34 crc kubenswrapper[4644]: I1213 07:37:34.536967 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_40233f71-8156-4715-adce-ef3fee2102e2/ovsdbserver-nb/0.log"
Dec 13 07:37:34 crc kubenswrapper[4644]: I1213 07:37:34.625601 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_829c5d95-3315-442d-8ec5-ed6a67497802/ovsdbserver-sb/0.log"
Dec 13 07:37:34 crc kubenswrapper[4644]: I1213 07:37:34.676691 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_829c5d95-3315-442d-8ec5-ed6a67497802/openstack-network-exporter/0.log"
Dec 13 07:37:34 crc kubenswrapper[4644]: I1213 07:37:34.812584 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-8f5479c4d-vcv6r_ccdc6627-1b86-41d6-993f-a2c0e641b81c/placement-api/0.log"
Dec 13 07:37:34 crc kubenswrapper[4644]: I1213 07:37:34.905410 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-8f5479c4d-vcv6r_ccdc6627-1b86-41d6-993f-a2c0e641b81c/placement-log/0.log"
Dec 13 07:37:34 crc kubenswrapper[4644]: I1213 07:37:34.985844 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_43c1f8b3-1d91-4152-bf2d-be501022615a/setup-container/0.log"
Dec 13 07:37:35 crc kubenswrapper[4644]: I1213 07:37:35.126282 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_43c1f8b3-1d91-4152-bf2d-be501022615a/setup-container/0.log"
Dec 13 07:37:35 crc kubenswrapper[4644]: I1213 07:37:35.165181 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_43c1f8b3-1d91-4152-bf2d-be501022615a/rabbitmq/0.log"
Dec 13 07:37:35 crc kubenswrapper[4644]: I1213 07:37:35.242489 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_78c20695-df2a-4d1d-a8e8-4dc7817b5803/setup-container/0.log"
Dec 13 07:37:35 crc kubenswrapper[4644]: I1213 07:37:35.460881 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_78c20695-df2a-4d1d-a8e8-4dc7817b5803/setup-container/0.log"
Dec 13 07:37:35 crc kubenswrapper[4644]: I1213 07:37:35.480018 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_78c20695-df2a-4d1d-a8e8-4dc7817b5803/rabbitmq/0.log"
Dec 13 07:37:35 crc kubenswrapper[4644]: I1213 07:37:35.507487 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-w9bfc_bf00b61e-57cf-4cc2-8fd7-44b661469364/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 13 07:37:35 crc kubenswrapper[4644]: I1213 07:37:35.705356 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-cdbgp_446c5329-ac75-4c95-a98a-a18ce659ebcd/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 13 07:37:35 crc kubenswrapper[4644]: I1213 07:37:35.752470 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-4pgl2_40249106-b634-45cd-b5fc-5648159b34c5/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 13 07:37:35 crc kubenswrapper[4644]: I1213 07:37:35.879543 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-cvtj8_70ea68cc-693f-4b1c-b563-b25ac7d5da93/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 13 07:37:36 crc kubenswrapper[4644]: I1213 07:37:36.033202 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-jpm9x_4e75d1e6-dda9-48c7-b6ca-5105f4f6592b/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 13 07:37:36 crc kubenswrapper[4644]: I1213 07:37:36.121034 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-z2w8l_802870f9-d6d6-493e-a2b0-69d7067dadbe/run-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 13 07:37:36 crc kubenswrapper[4644]: E1213 07:37:36.227170 4644 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod339cdeae_4efc_4747_a0d0_f833eb4dccf9.slice\": RecentStats: unable to find data in memory cache]"
Dec 13 07:37:36 crc kubenswrapper[4644]: I1213 07:37:36.260001 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-r94dw_cc7fbb6b-2295-4c73-a14f-d5888e6fc75c/ssh-known-hosts-edpm-deployment/0.log"
Dec 13 07:37:36 crc kubenswrapper[4644]: I1213 07:37:36.314742 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-d9xdz_57f610f9-fbe1-408a-b364-c395f20690dd/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 13 07:37:39 crc kubenswrapper[4644]: I1213 07:37:39.390216 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"
Dec 13 07:37:39 crc kubenswrapper[4644]: E1213 07:37:39.391041 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:37:41 crc kubenswrapper[4644]: I1213 07:37:41.437704 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_103d51fb-7ed3-487e-819f-bbcdfb2dea86/memcached/0.log"
Dec 13 07:37:54 crc kubenswrapper[4644]: I1213 07:37:54.389116 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"
Dec 13 07:37:54 crc kubenswrapper[4644]: E1213 07:37:54.389730 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:37:55 crc kubenswrapper[4644]: I1213 07:37:55.896919 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-95949466-9ffgl_3ec726b0-e1c1-497a-9364-f483cdf9b69b/manager/0.log"
Dec 13 07:37:56 crc kubenswrapper[4644]: I1213 07:37:56.077878 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-5cf45c46bd-tndds_11c2a2ff-6f82-4b30-909b-f0f8c1e92394/manager/0.log"
Dec 13 07:37:56 crc kubenswrapper[4644]: I1213 07:37:56.101695 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-66f8b87655-h6nxs_32a38d48-fe84-4ede-860c-ae76de27cbe6/manager/0.log"
Dec 13 07:37:56 crc kubenswrapper[4644]: I1213 07:37:56.235413 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx_69c15b9d-0c5d-472e-9a3f-b9b442ca557c/util/0.log"
Dec 13 07:37:56 crc kubenswrapper[4644]: I1213 07:37:56.369278 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx_69c15b9d-0c5d-472e-9a3f-b9b442ca557c/pull/0.log"
Dec 13 07:37:56 crc kubenswrapper[4644]: I1213 07:37:56.373191 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx_69c15b9d-0c5d-472e-9a3f-b9b442ca557c/pull/0.log"
Dec 13 07:37:56 crc kubenswrapper[4644]: I1213 07:37:56.374844 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx_69c15b9d-0c5d-472e-9a3f-b9b442ca557c/util/0.log"
Dec 13 07:37:56 crc kubenswrapper[4644]: I1213 07:37:56.511636 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx_69c15b9d-0c5d-472e-9a3f-b9b442ca557c/extract/0.log"
Dec 13 07:37:56 crc kubenswrapper[4644]: I1213 07:37:56.523095 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx_69c15b9d-0c5d-472e-9a3f-b9b442ca557c/pull/0.log"
Dec 13 07:37:56 crc kubenswrapper[4644]: I1213 07:37:56.544325 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fsdfcx_69c15b9d-0c5d-472e-9a3f-b9b442ca557c/util/0.log"
Dec 13 07:37:56 crc kubenswrapper[4644]: I1213 07:37:56.736099 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-767f9d7567-z2xnf_fd6f17a4-40cc-4465-8c67-58c67230344d/manager/0.log"
Dec 13 07:37:56 crc kubenswrapper[4644]: I1213 07:37:56.764545 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-59b8dcb766-9m7mp_98e02ffd-3d31-4b00-8bc7-5f225cdf9fc5/manager/0.log"
Dec 13 07:37:56 crc kubenswrapper[4644]: I1213 07:37:56.884198 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6ccf486b9-zmcbq_aac5283b-a0c7-4cac-8a72-07ca5444b743/manager/0.log"
Dec 13 07:37:57 crc kubenswrapper[4644]: I1213 07:37:57.264960 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-f458558d7-fhckm_ac41b645-ea22-42ac-846e-fa16d0beaee4/manager/0.log"
Dec 13 07:37:57 crc kubenswrapper[4644]: I1213 07:37:57.270329 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-58944d7758-4p77w_b8c3ef08-66ae-474e-8204-2338afb7d08d/manager/0.log"
Dec 13 07:37:57 crc kubenswrapper[4644]: I1213 07:37:57.484933 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5c7cbf548f-jfdpn_5e5582c5-50c3-4c4f-9693-16f2a71543ce/manager/0.log"
Dec 13 07:37:57 crc kubenswrapper[4644]: I1213 07:37:57.512108 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5fdd9786f7-58rgg_42b2a1fb-b5d1-46ff-932e-d831b53febf7/manager/0.log"
Dec 13 07:37:57 crc kubenswrapper[4644]: I1213 07:37:57.662752 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-f76f4954c-cmcbp_90ea237e-4f56-4008-a2df-d3c404424374/manager/0.log"
Dec 13 07:37:57 crc kubenswrapper[4644]: I1213 07:37:57.671851 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-7cd87b778f-vgdnd_d143ef34-f1db-411d-941b-c229888e22b2/manager/0.log"
Dec 13 07:37:57 crc kubenswrapper[4644]: I1213 07:37:57.855385 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-68c649d9d-k2fqd_d86f3cba-c9ef-47eb-b04e-8f10ac1b0734/manager/0.log"
Dec 13 07:37:57 crc kubenswrapper[4644]: I1213 07:37:57.890926 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5fbbf8b6cc-464zb_08cf1d52-d8b7-477f-92c7-1dd2732ff9e3/manager/0.log"
Dec 13 07:37:57 crc kubenswrapper[4644]: I1213 07:37:57.992570 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-689f887b544qprx_7cd61a98-cc77-41b1-a06f-912207565b37/manager/0.log"
Dec 13 07:37:58 crc kubenswrapper[4644]: I1213 07:37:58.214482 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-859586489-hlw4r_24da4990-742e-476a-aa8b-0a30e8dc0930/operator/0.log"
Dec 13 07:37:58 crc kubenswrapper[4644]: I1213 07:37:58.361488 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-2ktgc_8a8b2af3-ff75-4a0a-a3ec-6f1b90619082/registry-server/0.log"
Dec 13 07:37:58 crc kubenswrapper[4644]: I1213 07:37:58.528844 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-bf6d4f946-mtbhx_9f650b3c-af01-4ce4-a702-daab8d5affc5/manager/0.log"
Dec 13 07:37:58 crc kubenswrapper[4644]: I1213 07:37:58.678756 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-8665b56d78-kfnbp_2e6fefac-bf85-4f28-a30d-808e83a13141/manager/0.log"
Dec 13 07:37:58 crc kubenswrapper[4644]: I1213 07:37:58.811747 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-gfl6d_088c2258-52fc-4a04-b4c8-af259e9d2b75/operator/0.log"
Dec 13 07:37:58 crc kubenswrapper[4644]: I1213 07:37:58.961170 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5c6df8f9-bvc6f_da649804-862c-45db-97ee-ad47fed7a72d/manager/0.log"
Dec 13 07:37:59 crc kubenswrapper[4644]: I1213 07:37:59.102270 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-56f6fbdf6-q2xrp_48353918-3568-4a9c-a5d2-709fb831ee75/manager/0.log"
Dec 13 07:37:59 crc kubenswrapper[4644]: I1213 07:37:59.140818 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-97d456b9-fqsfn_1adec510-a153-47b2-ae1d-5430d4ff5e31/manager/0.log"
Dec 13 07:37:59 crc kubenswrapper[4644]: I1213 07:37:59.279483 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-756ccf86c7-46n8m_a02dee9b-ffed-4a5a-b833-cb236c105371/manager/0.log"
Dec 13 07:37:59 crc kubenswrapper[4644]: I1213 07:37:59.421020 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-55f78b7c4c-zgnj9_3b75e9d6-b3a1-46ec-ae83-830583970e9c/manager/0.log"
Dec 13 07:38:05 crc kubenswrapper[4644]: I1213 07:38:05.388974 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"
Dec 13 07:38:05 crc kubenswrapper[4644]: E1213 07:38:05.389633 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:38:14 crc kubenswrapper[4644]: I1213 07:38:14.539918 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-mrjxs_29418836-a9b6-42c2-90b1-755ff73fe3fa/control-plane-machine-set-operator/0.log"
Dec 13 07:38:14 crc kubenswrapper[4644]: I1213 07:38:14.679715 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-jjpl8_a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50/kube-rbac-proxy/0.log"
Dec 13 07:38:14 crc kubenswrapper[4644]: I1213 07:38:14.706831 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-jjpl8_a092bf2e-2ca3-4e02-b0d3-09d4cedb4e50/machine-api-operator/0.log"
Dec 13 07:38:16 crc kubenswrapper[4644]: I1213 07:38:16.391196 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"
Dec 13 07:38:16 crc kubenswrapper[4644]: E1213 07:38:16.392004 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:38:24 crc kubenswrapper[4644]: I1213 07:38:24.490684 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-n9bvw_528a14ec-8d24-4cd8-8bbe-bbd9871a1891/cert-manager-controller/0.log"
Dec 13 07:38:24 crc kubenswrapper[4644]: I1213 07:38:24.634072 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-nmcw8_97814a84-2776-43f5-b7ce-9ac0cd79f716/cert-manager-cainjector/0.log"
Dec 13 07:38:24 crc kubenswrapper[4644]: I1213 07:38:24.657878 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-wvbx4_1e9683a7-097e-46d6-87b0-2a55804f684f/cert-manager-webhook/0.log"
Dec 13 07:38:31 crc kubenswrapper[4644]: I1213 07:38:31.389352 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"
Dec 13 07:38:31 crc kubenswrapper[4644]: E1213 07:38:31.390043 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:38:33 crc kubenswrapper[4644]: I1213 07:38:33.913135 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6ff7998486-ftvkt_6f0a5aec-c142-49fa-bece-020038485089/nmstate-console-plugin/0.log"
Dec 13 07:38:34 crc kubenswrapper[4644]: I1213 07:38:34.045880 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-ck7qt_4c4e58d7-38d7-499f-91bf-eda13e345a14/nmstate-handler/0.log"
Dec 13 07:38:34 crc kubenswrapper[4644]: I1213 07:38:34.073545 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-85swm_4a8400f2-9d22-4c57-95fc-9f9a4a7bd0d7/kube-rbac-proxy/0.log"
Dec 13 07:38:34 crc kubenswrapper[4644]: I1213 07:38:34.115131 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-85swm_4a8400f2-9d22-4c57-95fc-9f9a4a7bd0d7/nmstate-metrics/0.log"
Dec 13 07:38:34 crc kubenswrapper[4644]: I1213 07:38:34.216280 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-6769fb99d-m8zfg_4a77404c-19db-4d2a-bb02-c6e81d7b6f0f/nmstate-operator/0.log"
Dec 13 07:38:34 crc kubenswrapper[4644]: I1213 07:38:34.277082 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-f8fb84555-p84qq_91c32695-2077-4074-9e11-424ea074c4a6/nmstate-webhook/0.log"
Dec 13 07:38:44 crc kubenswrapper[4644]: I1213 07:38:44.927287 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-55rq7_3125ef9d-0678-4f6c-9080-6b08ac39744a/kube-rbac-proxy/0.log"
Dec 13 07:38:45 crc kubenswrapper[4644]: I1213 07:38:45.043528 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-55rq7_3125ef9d-0678-4f6c-9080-6b08ac39744a/controller/0.log"
Dec 13 07:38:45 crc kubenswrapper[4644]: I1213 07:38:45.134896 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/cp-frr-files/0.log"
Dec 13 07:38:45 crc kubenswrapper[4644]: I1213 07:38:45.245235 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/cp-frr-files/0.log"
Dec 13 07:38:45 crc kubenswrapper[4644]: I1213 07:38:45.272146 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/cp-reloader/0.log"
Dec 13 07:38:45 crc kubenswrapper[4644]: I1213 07:38:45.297144 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/cp-metrics/0.log"
Dec 13 07:38:45 crc kubenswrapper[4644]: I1213 07:38:45.326044 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/cp-reloader/0.log"
Dec 13 07:38:45 crc kubenswrapper[4644]: I1213 07:38:45.438926 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/cp-frr-files/0.log"
Dec 13 07:38:45 crc kubenswrapper[4644]: I1213 07:38:45.468655 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/cp-metrics/0.log"
Dec 13 07:38:45 crc kubenswrapper[4644]: I1213 07:38:45.486889 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/cp-reloader/0.log"
Dec 13 07:38:45 crc kubenswrapper[4644]: I1213 07:38:45.499815 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/cp-metrics/0.log"
Dec 13 07:38:45 crc kubenswrapper[4644]: I1213 07:38:45.641959 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/cp-reloader/0.log"
Dec 13 07:38:45 crc kubenswrapper[4644]: I1213 07:38:45.644706 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/cp-frr-files/0.log"
Dec 13 07:38:45 crc kubenswrapper[4644]: I1213 07:38:45.673119 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/controller/0.log"
Dec 13 07:38:45 crc kubenswrapper[4644]: I1213 07:38:45.705058 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/cp-metrics/0.log"
Dec 13 07:38:45 crc kubenswrapper[4644]: I1213 07:38:45.812940 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/frr-metrics/0.log"
Dec 13 07:38:45 crc kubenswrapper[4644]: I1213 07:38:45.873592 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/kube-rbac-proxy/0.log"
Dec 13 07:38:45 crc kubenswrapper[4644]: I1213 07:38:45.888565 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/kube-rbac-proxy-frr/0.log"
Dec 13 07:38:46 crc kubenswrapper[4644]: I1213 07:38:46.042329 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/reloader/0.log"
Dec 13 07:38:46 crc kubenswrapper[4644]: I1213 07:38:46.103706 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7784b6fcf-ld5c2_b2f2ae81-858e-49c7-9a13-00a35850e02d/frr-k8s-webhook-server/0.log"
Dec 13 07:38:46 crc kubenswrapper[4644]: I1213 07:38:46.300967 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-855484b47c-v5c5q_9ad3f0c3-9ec1-43d4-8d56-16982c14ba46/manager/0.log"
Dec 13 07:38:46 crc kubenswrapper[4644]: I1213 07:38:46.389655 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"
Dec 13 07:38:46 crc kubenswrapper[4644]: E1213 07:38:46.390133 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:38:46 crc kubenswrapper[4644]: I1213 07:38:46.465684 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7dbf74bd54-pqt2v_0e78bd18-1038-45a4-989b-cd5fab723a89/webhook-server/0.log"
Dec 13 07:38:46 crc kubenswrapper[4644]: I1213 07:38:46.520023 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-lzmmt_72b3e06f-eeaa-4db7-a2dd-ec97404219bb/kube-rbac-proxy/0.log"
Dec 13 07:38:47 crc kubenswrapper[4644]: I1213 07:38:47.083673 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-lzmmt_72b3e06f-eeaa-4db7-a2dd-ec97404219bb/speaker/0.log"
Dec 13 07:38:47 crc kubenswrapper[4644]: I1213 07:38:47.141068 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-n2vrv_e4fe3a30-e94f-40c6-b734-1385b3f14d4f/frr/0.log"
Dec 13 07:38:56 crc kubenswrapper[4644]: I1213 07:38:56.855638 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht_b633d946-5f6e-4256-92ce-166f05f71f51/util/0.log"
Dec 13 07:38:56 crc kubenswrapper[4644]: I1213 07:38:56.996947 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht_b633d946-5f6e-4256-92ce-166f05f71f51/util/0.log"
Dec 13 07:38:57 crc kubenswrapper[4644]: I1213 07:38:57.042671 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht_b633d946-5f6e-4256-92ce-166f05f71f51/pull/0.log"
Dec 13 07:38:57 crc kubenswrapper[4644]: I1213 07:38:57.061106 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht_b633d946-5f6e-4256-92ce-166f05f71f51/pull/0.log"
Dec 13 07:38:57 crc kubenswrapper[4644]: I1213 07:38:57.193691 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht_b633d946-5f6e-4256-92ce-166f05f71f51/pull/0.log"
Dec 13 07:38:57 crc kubenswrapper[4644]: I1213 07:38:57.222172 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht_b633d946-5f6e-4256-92ce-166f05f71f51/extract/0.log"
Dec 13 07:38:57 crc kubenswrapper[4644]: I1213 07:38:57.232822 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4j74ht_b633d946-5f6e-4256-92ce-166f05f71f51/util/0.log"
Dec 13 07:38:57 crc kubenswrapper[4644]: I1213 07:38:57.344668 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw_ef428abc-94fc-46e3-9668-d4ba73a82bf0/util/0.log"
Dec 13 07:38:57 crc kubenswrapper[4644]: I1213 07:38:57.519432 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw_ef428abc-94fc-46e3-9668-d4ba73a82bf0/pull/0.log"
Dec 13 07:38:57 crc kubenswrapper[4644]: I1213 07:38:57.527427 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw_ef428abc-94fc-46e3-9668-d4ba73a82bf0/pull/0.log"
Dec 13 07:38:57 crc kubenswrapper[4644]: I1213 07:38:57.535508 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw_ef428abc-94fc-46e3-9668-d4ba73a82bf0/util/0.log"
Dec 13 07:38:57 crc kubenswrapper[4644]: I1213 07:38:57.653157 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw_ef428abc-94fc-46e3-9668-d4ba73a82bf0/util/0.log"
Dec 13 07:38:57 crc kubenswrapper[4644]: I1213 07:38:57.682579 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw_ef428abc-94fc-46e3-9668-d4ba73a82bf0/pull/0.log"
Dec 13 07:38:57 crc kubenswrapper[4644]: I1213 07:38:57.707038 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa8sdnsw_ef428abc-94fc-46e3-9668-d4ba73a82bf0/extract/0.log"
Dec 13 07:38:57 crc kubenswrapper[4644]: I1213 07:38:57.816869 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x79lz_26b8eb80-532a-4b48-8c6f-e68f835f94e0/extract-utilities/0.log"
Dec 13 07:38:57 crc kubenswrapper[4644]: I1213 07:38:57.946347 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x79lz_26b8eb80-532a-4b48-8c6f-e68f835f94e0/extract-utilities/0.log"
Dec 13 07:38:57 crc kubenswrapper[4644]: I1213 07:38:57.973181 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x79lz_26b8eb80-532a-4b48-8c6f-e68f835f94e0/extract-content/0.log"
Dec 13 07:38:57 crc kubenswrapper[4644]: I1213 07:38:57.982994 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x79lz_26b8eb80-532a-4b48-8c6f-e68f835f94e0/extract-content/0.log"
Dec 13 07:38:58 crc kubenswrapper[4644]: I1213 07:38:58.131563 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x79lz_26b8eb80-532a-4b48-8c6f-e68f835f94e0/extract-content/0.log"
Dec 13 07:38:58 crc kubenswrapper[4644]: I1213 07:38:58.165166 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x79lz_26b8eb80-532a-4b48-8c6f-e68f835f94e0/extract-utilities/0.log"
Dec 13 07:38:58 crc kubenswrapper[4644]: I1213 07:38:58.320018 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6fdth_2ec3a7e1-b616-4d72-8400-905ef791e7fc/extract-utilities/0.log"
Dec 13 07:38:58 crc kubenswrapper[4644]: I1213 07:38:58.404903 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"
Dec 13 07:38:58 crc kubenswrapper[4644]: E1213 07:38:58.405194 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:38:58 crc kubenswrapper[4644]: I1213 07:38:58.505403 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x79lz_26b8eb80-532a-4b48-8c6f-e68f835f94e0/registry-server/0.log"
Dec 13 07:38:58 crc kubenswrapper[4644]: I1213 07:38:58.508935 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6fdth_2ec3a7e1-b616-4d72-8400-905ef791e7fc/extract-utilities/0.log"
Dec 13 07:38:58 crc kubenswrapper[4644]: I1213 07:38:58.528860 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6fdth_2ec3a7e1-b616-4d72-8400-905ef791e7fc/extract-content/0.log"
Dec 13 07:38:58 crc kubenswrapper[4644]: I1213 07:38:58.587464 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6fdth_2ec3a7e1-b616-4d72-8400-905ef791e7fc/extract-content/0.log"
Dec 13 07:38:58 crc kubenswrapper[4644]: I1213 07:38:58.713964 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6fdth_2ec3a7e1-b616-4d72-8400-905ef791e7fc/extract-utilities/0.log"
Dec 13 07:38:58 crc kubenswrapper[4644]: I1213 07:38:58.753540 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6fdth_2ec3a7e1-b616-4d72-8400-905ef791e7fc/extract-content/0.log"
Dec 13 07:38:58 crc kubenswrapper[4644]: I1213 07:38:58.841546 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6fdth_2ec3a7e1-b616-4d72-8400-905ef791e7fc/registry-server/0.log"
Dec 13 07:38:58 crc kubenswrapper[4644]: I1213 07:38:58.882398 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-gb9x7_33f05cad-7917-4d9a-870d-b68d4388bbde/marketplace-operator/3.log"
Dec 13 07:38:58 crc kubenswrapper[4644]: I1213 07:38:58.976295 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-gb9x7_33f05cad-7917-4d9a-870d-b68d4388bbde/marketplace-operator/2.log"
Dec 13 07:38:59 crc kubenswrapper[4644]: I1213 07:38:59.016169 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mfrqg_38f2631f-f251-4435-aa08-588b1586ae3a/extract-utilities/0.log"
Dec 13 07:38:59 crc kubenswrapper[4644]: I1213 07:38:59.201189 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mfrqg_38f2631f-f251-4435-aa08-588b1586ae3a/extract-content/0.log"
Dec 13 07:38:59 crc kubenswrapper[4644]: I1213 07:38:59.202250 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mfrqg_38f2631f-f251-4435-aa08-588b1586ae3a/extract-utilities/0.log"
Dec 13 07:38:59 crc kubenswrapper[4644]: I1213 07:38:59.210621 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mfrqg_38f2631f-f251-4435-aa08-588b1586ae3a/extract-content/0.log"
Dec 13 07:38:59 crc kubenswrapper[4644]: I1213 07:38:59.327948 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mfrqg_38f2631f-f251-4435-aa08-588b1586ae3a/extract-utilities/0.log"
Dec 13 07:38:59 crc kubenswrapper[4644]: I1213 07:38:59.347971 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mfrqg_38f2631f-f251-4435-aa08-588b1586ae3a/extract-content/0.log"
Dec 13 07:38:59 crc kubenswrapper[4644]: I1213 07:38:59.444645 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mfrqg_38f2631f-f251-4435-aa08-588b1586ae3a/registry-server/0.log"
Dec 13 07:38:59 crc kubenswrapper[4644]: I1213 07:38:59.500212 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ng2d9_c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4/extract-utilities/0.log"
Dec 13 07:38:59 crc kubenswrapper[4644]: I1213 07:38:59.651897 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ng2d9_c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4/extract-utilities/0.log"
Dec 13 07:38:59 crc kubenswrapper[4644]: I1213 07:38:59.658550 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ng2d9_c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4/extract-content/0.log"
Dec 13 07:38:59 crc kubenswrapper[4644]: I1213 07:38:59.700925 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ng2d9_c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4/extract-content/0.log"
Dec 13 07:38:59 crc kubenswrapper[4644]: I1213 07:38:59.860336 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ng2d9_c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4/extract-utilities/0.log"
Dec 13 07:38:59 crc kubenswrapper[4644]: I1213 07:38:59.887219 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ng2d9_c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4/extract-content/0.log"
Dec 13 07:39:00 crc kubenswrapper[4644]: I1213 07:39:00.172239 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ng2d9_c08b5ba1-ef9d-40e0-a043-2d1e87e8f7b4/registry-server/0.log"
Dec 13 07:39:10 crc kubenswrapper[4644]: I1213 07:39:10.389061 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"
Dec 13 07:39:10 crc kubenswrapper[4644]: E1213 07:39:10.390658 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:39:16 crc kubenswrapper[4644]: E1213 07:39:16.282193 4644 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 192.168.25.89:47964->192.168.25.89:43865: write tcp 192.168.25.89:47964->192.168.25.89:43865: write: broken pipe
Dec 13 07:39:19 crc kubenswrapper[4644]: E1213 07:39:19.396247 4644 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca: pinging container registry 38.129.56.153:5001: Get \"http://38.129.56.153:5001/v2/\": dial tcp 38.129.56.153:5001: i/o timeout" image="38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca"
Dec 13 07:39:19 crc kubenswrapper[4644]: E1213 07:39:19.396556 4644 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca: pinging container registry 38.129.56.153:5001: Get \"http://38.129.56.153:5001/v2/\": dial tcp 38.129.56.153:5001: i/o timeout" image="38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca"
Dec 13 07:39:19 crc kubenswrapper[4644]: E1213 07:39:19.396691 4644 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kvtgd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-9fc9c756c-8sjtq_openstack-operators(511296bd-fff8-49c1-bbfd-b702905f6e83): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca: pinging container registry 38.129.56.153:5001: Get \"http://38.129.56.153:5001/v2/\": dial tcp 38.129.56.153:5001: i/o timeout" logger="UnhandledError"
Dec 13 07:39:19 crc kubenswrapper[4644]: E1213 07:39:19.397883 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca: pinging container registry 38.129.56.153:5001: Get \\\"http://38.129.56.153:5001/v2/\\\": dial tcp 38.129.56.153:5001: i/o timeout\"" pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" podUID="511296bd-fff8-49c1-bbfd-b702905f6e83"
Dec 13 07:39:23 crc kubenswrapper[4644]: I1213 07:39:23.390144 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"
Dec 13 07:39:23 crc kubenswrapper[4644]: E1213 07:39:23.391098 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:39:34 crc kubenswrapper[4644]: E1213 07:39:34.391922 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca\\\"\"" pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" podUID="511296bd-fff8-49c1-bbfd-b702905f6e83"
Dec 13 07:39:36 crc kubenswrapper[4644]: I1213 07:39:36.033510 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-a410-account-create-update-6z7z6"]
Dec 13 07:39:36 crc kubenswrapper[4644]: I1213 07:39:36.040401 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-zkrcn"]
Dec 13 07:39:36 crc kubenswrapper[4644]: I1213 07:39:36.046773 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-zkrcn"]
Dec 13 07:39:36 crc kubenswrapper[4644]: I1213 07:39:36.052344 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-a410-account-create-update-6z7z6"]
Dec 13 07:39:36 crc kubenswrapper[4644]: I1213 07:39:36.398463 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c34af67-d12c-49be-beed-0f83e5faa134" path="/var/lib/kubelet/pods/6c34af67-d12c-49be-beed-0f83e5faa134/volumes"
Dec 13 07:39:36 crc kubenswrapper[4644]: I1213 07:39:36.398991 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5" path="/var/lib/kubelet/pods/d1246aa1-7416-4cc9-bb7d-55f5e3d9fca5/volumes"
Dec 13 07:39:38 crc kubenswrapper[4644]: I1213 07:39:38.394360 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"
Dec 13 07:39:38 crc kubenswrapper[4644]: E1213 07:39:38.394891 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:39:43 crc kubenswrapper[4644]: I1213 07:39:43.735042 4644 scope.go:117] "RemoveContainer" containerID="a8cb62ef866ff5bed5585c29ae1959f5da9757d2c4b154db02574ee882f41400"
Dec 13 07:39:43 crc kubenswrapper[4644]: I1213 07:39:43.754393 4644 scope.go:117] "RemoveContainer" containerID="085bcebe1a4c25c261997f2a8f5e6b575910d73c97d03547623ffa2ca72d4fda"
Dec 13 07:39:48 crc kubenswrapper[4644]: E1213 07:39:48.400580 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca\\\"\"" pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" podUID="511296bd-fff8-49c1-bbfd-b702905f6e83"
Dec 13 07:39:49 crc kubenswrapper[4644]: I1213 07:39:49.389491 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"
Dec 13 07:39:49 crc kubenswrapper[4644]: E1213 07:39:49.390160 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:39:50 crc kubenswrapper[4644]: I1213 07:39:50.024567 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-6vs42"]
Dec 13 07:39:50 crc kubenswrapper[4644]: I1213 07:39:50.030876 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-6vs42"]
Dec 13 07:39:50 crc kubenswrapper[4644]: I1213 07:39:50.400002 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d158e5b2-13e4-4276-aba2-d5f061a842af" path="/var/lib/kubelet/pods/d158e5b2-13e4-4276-aba2-d5f061a842af/volumes"
Dec 13 07:40:01 crc kubenswrapper[4644]: I1213 07:40:01.391883 4644 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 13 07:40:03 crc kubenswrapper[4644]: I1213 07:40:03.389378 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"
Dec 13 07:40:03 crc kubenswrapper[4644]: E1213 07:40:03.389918 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:40:14 crc kubenswrapper[4644]: I1213 07:40:14.388788 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"
Dec 13 07:40:14 crc kubenswrapper[4644]: E1213 07:40:14.389318 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:40:16 crc kubenswrapper[4644]: I1213 07:40:16.239317 4644 generic.go:334] "Generic (PLEG): container finished" podID="fc6552aa-3834-4872-ac30-8196fd0dc80b" containerID="1bfa9a7344124006c021081657f55b79f2c2aaf04c88b452ef76dbb1698bee83" exitCode=0
Dec 13 07:40:16 crc kubenswrapper[4644]: I1213 07:40:16.239350 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tftvv/must-gather-z4kh6" event={"ID":"fc6552aa-3834-4872-ac30-8196fd0dc80b","Type":"ContainerDied","Data":"1bfa9a7344124006c021081657f55b79f2c2aaf04c88b452ef76dbb1698bee83"}
Dec 13 07:40:16 crc kubenswrapper[4644]: I1213 07:40:16.240109 4644 scope.go:117] "RemoveContainer" containerID="1bfa9a7344124006c021081657f55b79f2c2aaf04c88b452ef76dbb1698bee83"
Dec 13 07:40:16 crc kubenswrapper[4644]: I1213 07:40:16.534833 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-tftvv_must-gather-z4kh6_fc6552aa-3834-4872-ac30-8196fd0dc80b/gather/0.log"
Dec 13 07:40:24 crc kubenswrapper[4644]: I1213 07:40:24.222896 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-tftvv/must-gather-z4kh6"]
Dec 13 07:40:24 crc kubenswrapper[4644]: I1213 07:40:24.223434 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-tftvv/must-gather-z4kh6" podUID="fc6552aa-3834-4872-ac30-8196fd0dc80b" containerName="copy" containerID="cri-o://83335dcb2c9c084900b6cea6d3e4e181606501165f1986ee83eee3208a5ba719" gracePeriod=2
Dec 13 07:40:24 crc kubenswrapper[4644]: I1213 07:40:24.229236 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-tftvv/must-gather-z4kh6"]
Dec 13 07:40:24 crc kubenswrapper[4644]: I1213 07:40:24.590648 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-tftvv_must-gather-z4kh6_fc6552aa-3834-4872-ac30-8196fd0dc80b/copy/0.log"
Dec 13 07:40:24 crc kubenswrapper[4644]: I1213 07:40:24.591155 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tftvv/must-gather-z4kh6"
Dec 13 07:40:24 crc kubenswrapper[4644]: I1213 07:40:24.781316 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/fc6552aa-3834-4872-ac30-8196fd0dc80b-must-gather-output\") pod \"fc6552aa-3834-4872-ac30-8196fd0dc80b\" (UID: \"fc6552aa-3834-4872-ac30-8196fd0dc80b\") "
Dec 13 07:40:24 crc kubenswrapper[4644]: I1213 07:40:24.782158 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnlmb\" (UniqueName: \"kubernetes.io/projected/fc6552aa-3834-4872-ac30-8196fd0dc80b-kube-api-access-xnlmb\") pod \"fc6552aa-3834-4872-ac30-8196fd0dc80b\" (UID: \"fc6552aa-3834-4872-ac30-8196fd0dc80b\") "
Dec 13 07:40:24 crc kubenswrapper[4644]: I1213 07:40:24.786514 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc6552aa-3834-4872-ac30-8196fd0dc80b-kube-api-access-xnlmb" (OuterVolumeSpecName: "kube-api-access-xnlmb") pod "fc6552aa-3834-4872-ac30-8196fd0dc80b" (UID: "fc6552aa-3834-4872-ac30-8196fd0dc80b"). InnerVolumeSpecName "kube-api-access-xnlmb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 13 07:40:24 crc kubenswrapper[4644]: I1213 07:40:24.885274 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnlmb\" (UniqueName: \"kubernetes.io/projected/fc6552aa-3834-4872-ac30-8196fd0dc80b-kube-api-access-xnlmb\") on node \"crc\" DevicePath \"\""
Dec 13 07:40:24 crc kubenswrapper[4644]: I1213 07:40:24.888115 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc6552aa-3834-4872-ac30-8196fd0dc80b-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "fc6552aa-3834-4872-ac30-8196fd0dc80b" (UID: "fc6552aa-3834-4872-ac30-8196fd0dc80b"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 13 07:40:24 crc kubenswrapper[4644]: I1213 07:40:24.987413 4644 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/fc6552aa-3834-4872-ac30-8196fd0dc80b-must-gather-output\") on node \"crc\" DevicePath \"\""
Dec 13 07:40:25 crc kubenswrapper[4644]: I1213 07:40:25.295889 4644 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-tftvv_must-gather-z4kh6_fc6552aa-3834-4872-ac30-8196fd0dc80b/copy/0.log"
Dec 13 07:40:25 crc kubenswrapper[4644]: I1213 07:40:25.296559 4644 generic.go:334] "Generic (PLEG): container finished" podID="fc6552aa-3834-4872-ac30-8196fd0dc80b" containerID="83335dcb2c9c084900b6cea6d3e4e181606501165f1986ee83eee3208a5ba719" exitCode=143
Dec 13 07:40:25 crc kubenswrapper[4644]: I1213 07:40:25.296604 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tftvv/must-gather-z4kh6"
Dec 13 07:40:25 crc kubenswrapper[4644]: I1213 07:40:25.296616 4644 scope.go:117] "RemoveContainer" containerID="83335dcb2c9c084900b6cea6d3e4e181606501165f1986ee83eee3208a5ba719"
Dec 13 07:40:25 crc kubenswrapper[4644]: I1213 07:40:25.312288 4644 scope.go:117] "RemoveContainer" containerID="1bfa9a7344124006c021081657f55b79f2c2aaf04c88b452ef76dbb1698bee83"
Dec 13 07:40:25 crc kubenswrapper[4644]: I1213 07:40:25.370587 4644 scope.go:117] "RemoveContainer" containerID="83335dcb2c9c084900b6cea6d3e4e181606501165f1986ee83eee3208a5ba719"
Dec 13 07:40:25 crc kubenswrapper[4644]: E1213 07:40:25.370897 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83335dcb2c9c084900b6cea6d3e4e181606501165f1986ee83eee3208a5ba719\": container with ID starting with 83335dcb2c9c084900b6cea6d3e4e181606501165f1986ee83eee3208a5ba719 not found: ID does not exist" containerID="83335dcb2c9c084900b6cea6d3e4e181606501165f1986ee83eee3208a5ba719"
Dec 13 07:40:25 crc kubenswrapper[4644]: I1213 07:40:25.370929 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83335dcb2c9c084900b6cea6d3e4e181606501165f1986ee83eee3208a5ba719"} err="failed to get container status \"83335dcb2c9c084900b6cea6d3e4e181606501165f1986ee83eee3208a5ba719\": rpc error: code = NotFound desc = could not find container \"83335dcb2c9c084900b6cea6d3e4e181606501165f1986ee83eee3208a5ba719\": container with ID starting with 83335dcb2c9c084900b6cea6d3e4e181606501165f1986ee83eee3208a5ba719 not found: ID does not exist"
Dec 13 07:40:25 crc kubenswrapper[4644]: I1213 07:40:25.370947 4644 scope.go:117] "RemoveContainer" containerID="1bfa9a7344124006c021081657f55b79f2c2aaf04c88b452ef76dbb1698bee83"
Dec 13 07:40:25 crc kubenswrapper[4644]: E1213 07:40:25.371189 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bfa9a7344124006c021081657f55b79f2c2aaf04c88b452ef76dbb1698bee83\": container with ID starting with 1bfa9a7344124006c021081657f55b79f2c2aaf04c88b452ef76dbb1698bee83 not found: ID does not exist" containerID="1bfa9a7344124006c021081657f55b79f2c2aaf04c88b452ef76dbb1698bee83"
Dec 13 07:40:25 crc kubenswrapper[4644]: I1213 07:40:25.371211 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bfa9a7344124006c021081657f55b79f2c2aaf04c88b452ef76dbb1698bee83"} err="failed to get container status \"1bfa9a7344124006c021081657f55b79f2c2aaf04c88b452ef76dbb1698bee83\": rpc error: code = NotFound desc = could not find container \"1bfa9a7344124006c021081657f55b79f2c2aaf04c88b452ef76dbb1698bee83\": container with ID starting with 1bfa9a7344124006c021081657f55b79f2c2aaf04c88b452ef76dbb1698bee83 not found: ID does not exist"
Dec 13 07:40:26 crc kubenswrapper[4644]: I1213 07:40:26.397696 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc6552aa-3834-4872-ac30-8196fd0dc80b" path="/var/lib/kubelet/pods/fc6552aa-3834-4872-ac30-8196fd0dc80b/volumes"
Dec 13 07:40:28 crc kubenswrapper[4644]: I1213 07:40:28.394391 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"
Dec 13 07:40:28 crc kubenswrapper[4644]: E1213 07:40:28.395187 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-45tj4_openshift-machine-config-operator(48240f19-087e-4597-b448-ab1a190a5027)\"" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027"
Dec 13 07:40:41 crc kubenswrapper[4644]: I1213 07:40:41.390437 4644 scope.go:117] "RemoveContainer" containerID="81aa6bce22cde7bfae8043cd52cc81c5fac61ae76638a5ae259daa8a50c34428"
Dec 13 07:40:42 crc kubenswrapper[4644]: I1213 07:40:42.411080 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" event={"ID":"48240f19-087e-4597-b448-ab1a190a5027","Type":"ContainerStarted","Data":"53742e2ed31929ee3a82d7cc44521034df1a9abbb6b06401a9b3184aa72e4c5d"}
Dec 13 07:40:43 crc kubenswrapper[4644]: I1213 07:40:43.825810 4644 scope.go:117] "RemoveContainer" containerID="f65b897336173d2e65e48b5b56b8fc0589295ab5f0afc7639a23b23002d8b069"
Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.680933 4644 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2ktpv"]
Dec 13 07:41:15 crc kubenswrapper[4644]: E1213 07:41:15.682780 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0a078e0-da55-4798-973b-2cc8b6a09c90" containerName="container-00"
Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.682810 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0a078e0-da55-4798-973b-2cc8b6a09c90" containerName="container-00"
Dec 13 07:41:15 crc kubenswrapper[4644]: E1213 07:41:15.682831 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc6552aa-3834-4872-ac30-8196fd0dc80b" containerName="gather"
Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.682838 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc6552aa-3834-4872-ac30-8196fd0dc80b" containerName="gather"
Dec 13 07:41:15 crc kubenswrapper[4644]: E1213 07:41:15.682846 4644 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc6552aa-3834-4872-ac30-8196fd0dc80b" containerName="copy"
Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.682851 4644 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc6552aa-3834-4872-ac30-8196fd0dc80b" containerName="copy"
Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.683078 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0a078e0-da55-4798-973b-2cc8b6a09c90" containerName="container-00"
Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.683099 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc6552aa-3834-4872-ac30-8196fd0dc80b" containerName="gather"
Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.683111 4644 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc6552aa-3834-4872-ac30-8196fd0dc80b" containerName="copy"
Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.684295 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ktpv"
Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.688331 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ktpv"]
Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.791650 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6812c04-8df4-4165-b461-6e3d3c4e1e20-catalog-content\") pod \"redhat-marketplace-2ktpv\" (UID: \"e6812c04-8df4-4165-b461-6e3d3c4e1e20\") " pod="openshift-marketplace/redhat-marketplace-2ktpv"
Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.791827 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6812c04-8df4-4165-b461-6e3d3c4e1e20-utilities\") pod \"redhat-marketplace-2ktpv\" (UID: \"e6812c04-8df4-4165-b461-6e3d3c4e1e20\") " pod="openshift-marketplace/redhat-marketplace-2ktpv"
Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.791856 4644 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xkcl\" (UniqueName: \"kubernetes.io/projected/e6812c04-8df4-4165-b461-6e3d3c4e1e20-kube-api-access-2xkcl\") pod \"redhat-marketplace-2ktpv\" (UID: \"e6812c04-8df4-4165-b461-6e3d3c4e1e20\") " pod="openshift-marketplace/redhat-marketplace-2ktpv"
Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.898314 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6812c04-8df4-4165-b461-6e3d3c4e1e20-catalog-content\") pod \"redhat-marketplace-2ktpv\" (UID: \"e6812c04-8df4-4165-b461-6e3d3c4e1e20\") " pod="openshift-marketplace/redhat-marketplace-2ktpv"
Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.898626 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6812c04-8df4-4165-b461-6e3d3c4e1e20-utilities\") pod \"redhat-marketplace-2ktpv\" (UID: \"e6812c04-8df4-4165-b461-6e3d3c4e1e20\") " pod="openshift-marketplace/redhat-marketplace-2ktpv"
Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.898673 4644 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xkcl\" (UniqueName: \"kubernetes.io/projected/e6812c04-8df4-4165-b461-6e3d3c4e1e20-kube-api-access-2xkcl\") pod \"redhat-marketplace-2ktpv\" (UID: \"e6812c04-8df4-4165-b461-6e3d3c4e1e20\") " pod="openshift-marketplace/redhat-marketplace-2ktpv"
Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.899422 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6812c04-8df4-4165-b461-6e3d3c4e1e20-catalog-content\") pod \"redhat-marketplace-2ktpv\" (UID: \"e6812c04-8df4-4165-b461-6e3d3c4e1e20\") " pod="openshift-marketplace/redhat-marketplace-2ktpv"
Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.899459 4644
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6812c04-8df4-4165-b461-6e3d3c4e1e20-utilities\") pod \"redhat-marketplace-2ktpv\" (UID: \"e6812c04-8df4-4165-b461-6e3d3c4e1e20\") " pod="openshift-marketplace/redhat-marketplace-2ktpv" Dec 13 07:41:15 crc kubenswrapper[4644]: I1213 07:41:15.915330 4644 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xkcl\" (UniqueName: \"kubernetes.io/projected/e6812c04-8df4-4165-b461-6e3d3c4e1e20-kube-api-access-2xkcl\") pod \"redhat-marketplace-2ktpv\" (UID: \"e6812c04-8df4-4165-b461-6e3d3c4e1e20\") " pod="openshift-marketplace/redhat-marketplace-2ktpv" Dec 13 07:41:16 crc kubenswrapper[4644]: I1213 07:41:16.001007 4644 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ktpv" Dec 13 07:41:16 crc kubenswrapper[4644]: I1213 07:41:16.377933 4644 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ktpv"] Dec 13 07:41:16 crc kubenswrapper[4644]: I1213 07:41:16.624962 4644 generic.go:334] "Generic (PLEG): container finished" podID="e6812c04-8df4-4165-b461-6e3d3c4e1e20" containerID="1d2bd448f149f14247e38aed3f3df2e882b71639d4412aa3a41b0385add49588" exitCode=0 Dec 13 07:41:16 crc kubenswrapper[4644]: I1213 07:41:16.625013 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ktpv" event={"ID":"e6812c04-8df4-4165-b461-6e3d3c4e1e20","Type":"ContainerDied","Data":"1d2bd448f149f14247e38aed3f3df2e882b71639d4412aa3a41b0385add49588"} Dec 13 07:41:16 crc kubenswrapper[4644]: I1213 07:41:16.625062 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ktpv" event={"ID":"e6812c04-8df4-4165-b461-6e3d3c4e1e20","Type":"ContainerStarted","Data":"fdaa534392780c5b0bf035f1cc54a3b9ca004847195d951dc3b85e901a5e7b81"} Dec 13 07:41:17 crc kubenswrapper[4644]: I1213 07:41:17.632957 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ktpv" event={"ID":"e6812c04-8df4-4165-b461-6e3d3c4e1e20","Type":"ContainerStarted","Data":"b23e91a96b7832caefadf2dd5be238148c5d8a92f0d944f925d60b252733cb81"} Dec 13 07:41:18 crc kubenswrapper[4644]: I1213 07:41:18.641655 4644 generic.go:334] "Generic (PLEG): container finished" podID="e6812c04-8df4-4165-b461-6e3d3c4e1e20" containerID="b23e91a96b7832caefadf2dd5be238148c5d8a92f0d944f925d60b252733cb81" exitCode=0 Dec 13 07:41:18 crc kubenswrapper[4644]: I1213 07:41:18.641695 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ktpv" event={"ID":"e6812c04-8df4-4165-b461-6e3d3c4e1e20","Type":"ContainerDied","Data":"b23e91a96b7832caefadf2dd5be238148c5d8a92f0d944f925d60b252733cb81"} Dec 13 07:41:19 crc kubenswrapper[4644]: I1213 07:41:19.649506 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ktpv" event={"ID":"e6812c04-8df4-4165-b461-6e3d3c4e1e20","Type":"ContainerStarted","Data":"f9487281365573be3a981d1337da4d416837e6b3fffcec557a10b919fdda06c6"} Dec 13 07:41:19 crc kubenswrapper[4644]: I1213 07:41:19.672492 4644 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2ktpv" podStartSLOduration=2.151627005 podStartE2EDuration="4.672476363s" podCreationTimestamp="2025-12-13 07:41:15 +0000 UTC" firstStartedPulling="2025-12-13 07:41:16.62646648 
+0000 UTC m=+3338.841417313" lastFinishedPulling="2025-12-13 07:41:19.147315839 +0000 UTC m=+3341.362266671" observedRunningTime="2025-12-13 07:41:19.666659996 +0000 UTC m=+3341.881610829" watchObservedRunningTime="2025-12-13 07:41:19.672476363 +0000 UTC m=+3341.887427196" Dec 13 07:41:26 crc kubenswrapper[4644]: I1213 07:41:26.001952 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2ktpv" Dec 13 07:41:26 crc kubenswrapper[4644]: I1213 07:41:26.002520 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2ktpv" Dec 13 07:41:26 crc kubenswrapper[4644]: I1213 07:41:26.039865 4644 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2ktpv" Dec 13 07:41:26 crc kubenswrapper[4644]: I1213 07:41:26.735595 4644 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2ktpv" Dec 13 07:41:26 crc kubenswrapper[4644]: I1213 07:41:26.773948 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ktpv"] Dec 13 07:41:28 crc kubenswrapper[4644]: I1213 07:41:28.717110 4644 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2ktpv" podUID="e6812c04-8df4-4165-b461-6e3d3c4e1e20" containerName="registry-server" containerID="cri-o://f9487281365573be3a981d1337da4d416837e6b3fffcec557a10b919fdda06c6" gracePeriod=2 Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.094104 4644 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ktpv" Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.280065 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6812c04-8df4-4165-b461-6e3d3c4e1e20-utilities\") pod \"e6812c04-8df4-4165-b461-6e3d3c4e1e20\" (UID: \"e6812c04-8df4-4165-b461-6e3d3c4e1e20\") " Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.280266 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6812c04-8df4-4165-b461-6e3d3c4e1e20-catalog-content\") pod \"e6812c04-8df4-4165-b461-6e3d3c4e1e20\" (UID: \"e6812c04-8df4-4165-b461-6e3d3c4e1e20\") " Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.280322 4644 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xkcl\" (UniqueName: \"kubernetes.io/projected/e6812c04-8df4-4165-b461-6e3d3c4e1e20-kube-api-access-2xkcl\") pod \"e6812c04-8df4-4165-b461-6e3d3c4e1e20\" (UID: \"e6812c04-8df4-4165-b461-6e3d3c4e1e20\") " Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.281143 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6812c04-8df4-4165-b461-6e3d3c4e1e20-utilities" (OuterVolumeSpecName: "utilities") pod "e6812c04-8df4-4165-b461-6e3d3c4e1e20" (UID: "e6812c04-8df4-4165-b461-6e3d3c4e1e20"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.282670 4644 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6812c04-8df4-4165-b461-6e3d3c4e1e20-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.292391 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6812c04-8df4-4165-b461-6e3d3c4e1e20-kube-api-access-2xkcl" (OuterVolumeSpecName: "kube-api-access-2xkcl") pod "e6812c04-8df4-4165-b461-6e3d3c4e1e20" (UID: "e6812c04-8df4-4165-b461-6e3d3c4e1e20"). InnerVolumeSpecName "kube-api-access-2xkcl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.297159 4644 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6812c04-8df4-4165-b461-6e3d3c4e1e20-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e6812c04-8df4-4165-b461-6e3d3c4e1e20" (UID: "e6812c04-8df4-4165-b461-6e3d3c4e1e20"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.384614 4644 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xkcl\" (UniqueName: \"kubernetes.io/projected/e6812c04-8df4-4165-b461-6e3d3c4e1e20-kube-api-access-2xkcl\") on node \"crc\" DevicePath \"\"" Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.384665 4644 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6812c04-8df4-4165-b461-6e3d3c4e1e20-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.726111 4644 generic.go:334] "Generic (PLEG): container finished" podID="e6812c04-8df4-4165-b461-6e3d3c4e1e20" containerID="f9487281365573be3a981d1337da4d416837e6b3fffcec557a10b919fdda06c6" exitCode=0 Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.726149 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ktpv" event={"ID":"e6812c04-8df4-4165-b461-6e3d3c4e1e20","Type":"ContainerDied","Data":"f9487281365573be3a981d1337da4d416837e6b3fffcec557a10b919fdda06c6"} Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.726187 4644 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ktpv" event={"ID":"e6812c04-8df4-4165-b461-6e3d3c4e1e20","Type":"ContainerDied","Data":"fdaa534392780c5b0bf035f1cc54a3b9ca004847195d951dc3b85e901a5e7b81"} Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.726183 4644 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ktpv" Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.726199 4644 scope.go:117] "RemoveContainer" containerID="f9487281365573be3a981d1337da4d416837e6b3fffcec557a10b919fdda06c6" Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.741706 4644 scope.go:117] "RemoveContainer" containerID="b23e91a96b7832caefadf2dd5be238148c5d8a92f0d944f925d60b252733cb81" Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.750353 4644 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ktpv"] Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.758859 4644 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ktpv"] Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.781503 4644 scope.go:117] "RemoveContainer" containerID="1d2bd448f149f14247e38aed3f3df2e882b71639d4412aa3a41b0385add49588" Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.795563 4644 scope.go:117] "RemoveContainer" containerID="f9487281365573be3a981d1337da4d416837e6b3fffcec557a10b919fdda06c6" Dec 13 07:41:29 crc kubenswrapper[4644]: E1213 07:41:29.795788 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9487281365573be3a981d1337da4d416837e6b3fffcec557a10b919fdda06c6\": container with ID starting with f9487281365573be3a981d1337da4d416837e6b3fffcec557a10b919fdda06c6 not found: ID does not exist" containerID="f9487281365573be3a981d1337da4d416837e6b3fffcec557a10b919fdda06c6" Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.795822 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9487281365573be3a981d1337da4d416837e6b3fffcec557a10b919fdda06c6"} err="failed to get container status \"f9487281365573be3a981d1337da4d416837e6b3fffcec557a10b919fdda06c6\": rpc error: code = NotFound desc = could not find container \"f9487281365573be3a981d1337da4d416837e6b3fffcec557a10b919fdda06c6\": container with ID starting with f9487281365573be3a981d1337da4d416837e6b3fffcec557a10b919fdda06c6 not found: ID does not exist" Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.795844 4644 scope.go:117] "RemoveContainer" containerID="b23e91a96b7832caefadf2dd5be238148c5d8a92f0d944f925d60b252733cb81" Dec 13 07:41:29 crc kubenswrapper[4644]: E1213 07:41:29.796167 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b23e91a96b7832caefadf2dd5be238148c5d8a92f0d944f925d60b252733cb81\": container with ID starting with b23e91a96b7832caefadf2dd5be238148c5d8a92f0d944f925d60b252733cb81 not found: ID does not exist" containerID="b23e91a96b7832caefadf2dd5be238148c5d8a92f0d944f925d60b252733cb81" Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.796196 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b23e91a96b7832caefadf2dd5be238148c5d8a92f0d944f925d60b252733cb81"} err="failed to get container status \"b23e91a96b7832caefadf2dd5be238148c5d8a92f0d944f925d60b252733cb81\": rpc error: code = NotFound desc = could not find container \"b23e91a96b7832caefadf2dd5be238148c5d8a92f0d944f925d60b252733cb81\": container with ID starting with b23e91a96b7832caefadf2dd5be238148c5d8a92f0d944f925d60b252733cb81 not found: ID does not exist" Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.796216 4644 scope.go:117] "RemoveContainer" 
containerID="1d2bd448f149f14247e38aed3f3df2e882b71639d4412aa3a41b0385add49588" Dec 13 07:41:29 crc kubenswrapper[4644]: E1213 07:41:29.796623 4644 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d2bd448f149f14247e38aed3f3df2e882b71639d4412aa3a41b0385add49588\": container with ID starting with 1d2bd448f149f14247e38aed3f3df2e882b71639d4412aa3a41b0385add49588 not found: ID does not exist" containerID="1d2bd448f149f14247e38aed3f3df2e882b71639d4412aa3a41b0385add49588" Dec 13 07:41:29 crc kubenswrapper[4644]: I1213 07:41:29.796669 4644 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d2bd448f149f14247e38aed3f3df2e882b71639d4412aa3a41b0385add49588"} err="failed to get container status \"1d2bd448f149f14247e38aed3f3df2e882b71639d4412aa3a41b0385add49588\": rpc error: code = NotFound desc = could not find container \"1d2bd448f149f14247e38aed3f3df2e882b71639d4412aa3a41b0385add49588\": container with ID starting with 1d2bd448f149f14247e38aed3f3df2e882b71639d4412aa3a41b0385add49588 not found: ID does not exist" Dec 13 07:41:30 crc kubenswrapper[4644]: I1213 07:41:30.396763 4644 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6812c04-8df4-4165-b461-6e3d3c4e1e20" path="/var/lib/kubelet/pods/e6812c04-8df4-4165-b461-6e3d3c4e1e20/volumes" Dec 13 07:42:01 crc kubenswrapper[4644]: E1213 07:42:01.396673 4644 log.go:32] "PullImage from image service failed" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca: pinging container registry 38.129.56.153:5001: Get \"http://38.129.56.153:5001/v2/\": dial tcp 38.129.56.153:5001: i/o timeout" image="38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca" Dec 13 07:42:01 crc kubenswrapper[4644]: E1213 07:42:01.397042 4644 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca: pinging container registry 38.129.56.153:5001: Get \"http://38.129.56.153:5001/v2/\": dial tcp 38.129.56.153:5001: i/o timeout" image="38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca" Dec 13 07:42:01 crc kubenswrapper[4644]: E1213 07:42:01.397165 4644 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kvtgd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-9fc9c756c-8sjtq_openstack-operators(511296bd-fff8-49c1-bbfd-b702905f6e83): ErrImagePull: rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca: pinging container registry 38.129.56.153:5001: Get \"http://38.129.56.153:5001/v2/\": dial tcp 38.129.56.153:5001: i/o timeout" logger="UnhandledError" Dec 13 07:42:01 crc kubenswrapper[4644]: E1213 07:42:01.398265 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = DeadlineExceeded desc = initializing source docker://38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca: pinging container registry 38.129.56.153:5001: Get \\\"http://38.129.56.153:5001/v2/\\\": dial tcp 38.129.56.153:5001: i/o timeout\"" pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" podUID="511296bd-fff8-49c1-bbfd-b702905f6e83" Dec 13 07:42:12 crc kubenswrapper[4644]: E1213 07:42:12.391252 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca\\\"\"" pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" podUID="511296bd-fff8-49c1-bbfd-b702905f6e83" Dec 13 07:42:23 crc kubenswrapper[4644]: E1213 07:42:23.390919 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca\\\"\"" pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" podUID="511296bd-fff8-49c1-bbfd-b702905f6e83" Dec 13 07:42:36 crc kubenswrapper[4644]: E1213 07:42:36.390581 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca\\\"\"" pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" podUID="511296bd-fff8-49c1-bbfd-b702905f6e83" Dec 13 07:42:49 crc kubenswrapper[4644]: E1213 07:42:49.390782 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca\\\"\"" pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" podUID="511296bd-fff8-49c1-bbfd-b702905f6e83" Dec 13 07:43:00 crc kubenswrapper[4644]: E1213 07:43:00.390320 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca\\\"\"" pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" podUID="511296bd-fff8-49c1-bbfd-b702905f6e83" Dec 13 07:43:09 crc kubenswrapper[4644]: I1213 07:43:09.753805 4644 patch_prober.go:28] interesting pod/machine-config-daemon-45tj4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:43:09 crc kubenswrapper[4644]: I1213 07:43:09.754421 4644 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-45tj4" podUID="48240f19-087e-4597-b448-ab1a190a5027" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:43:11 crc kubenswrapper[4644]: E1213 07:43:11.391864 4644 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.129.56.153:5001/openstack-k8s-operators/test-operator:d19f803f400b92d4afd97dd749e753a7435bfaca\\\"\"" pod="openstack-operators/test-operator-controller-manager-9fc9c756c-8sjtq" podUID="511296bd-fff8-49c1-bbfd-b702905f6e83" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515117214452024447 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015117214453017365 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015117205363016510 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015117205363015460 5ustar corecore